Nov 21 13:36:00 crc systemd[1]: Starting Kubernetes Kubelet...
Nov 21 13:36:00 crc restorecon[4695]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 21 13:36:00 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]:
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc 
restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc 
restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 13:36:01 
crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 
13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 
13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 13:36:01 crc 
restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 
13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 21 13:36:01 crc restorecon[4695]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 21 13:36:01 crc restorecon[4695]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Nov 21 13:36:02 crc kubenswrapper[4721]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 21 13:36:02 crc kubenswrapper[4721]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Nov 21 13:36:02 crc kubenswrapper[4721]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 21 13:36:02 crc kubenswrapper[4721]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Nov 21 13:36:02 crc kubenswrapper[4721]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Nov 21 13:36:02 crc kubenswrapper[4721]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.629000 4721 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638669 4721 feature_gate.go:330] unrecognized feature gate: SignatureStores
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638699 4721 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638709 4721 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638719 4721 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638731 4721 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638742 4721 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638751 4721 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638759 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638768 4721 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638776 4721 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638784 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638793 4721 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638800 4721 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638808 4721 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638816 4721 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638826 4721 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638837 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638845 4721 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638854 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638862 4721 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638870 4721 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638878 4721 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638900 4721 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638907 4721 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638915 4721 feature_gate.go:330] unrecognized feature gate: OVNObservability
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638923 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638930 4721 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638938 4721 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638946 4721 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638954 4721 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638962 4721 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638969 4721 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638977 4721 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638984 4721 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.638992 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639001 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639008 4721 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639016 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639023 4721 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639030 4721 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639038 4721 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639046 4721 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639053 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639083 4721 feature_gate.go:330] unrecognized feature gate: NewOLM
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639092 4721 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639104 4721 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639114 4721 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639122 4721 feature_gate.go:330] unrecognized feature gate: PinnedImages
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639129 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639137 4721 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639146 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639154 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639161 4721 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639168 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639176 4721 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639185 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639193 4721 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639201 4721 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639209 4721 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639217 4721 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639225 4721 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639238 4721 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639247 4721 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639256 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639264 4721 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639274 4721 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639282 4721 feature_gate.go:330] unrecognized feature gate: Example
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639290 4721 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639298 4721 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639306 4721 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.639314 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639464 4721 flags.go:64] FLAG: --address="0.0.0.0"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639481 4721 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639498 4721 flags.go:64] FLAG: --anonymous-auth="true"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639511 4721 flags.go:64] FLAG: --application-metrics-count-limit="100"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639523 4721 flags.go:64] FLAG: --authentication-token-webhook="false"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639533 4721 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639546 4721 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639558 4721 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639567 4721 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639579 4721 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639592 4721 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639605 4721 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639616 4721 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639628 4721 flags.go:64] FLAG: --cgroup-root=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639638 4721 flags.go:64] FLAG: --cgroups-per-qos="true"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639647 4721 flags.go:64] FLAG: --client-ca-file=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639656 4721 flags.go:64] FLAG: --cloud-config=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639666 4721 flags.go:64] FLAG: --cloud-provider=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639675 4721 flags.go:64] FLAG: --cluster-dns="[]"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639686 4721 flags.go:64] FLAG: --cluster-domain=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639694 4721 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639704 4721 flags.go:64] FLAG: --config-dir=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639713 4721 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639723 4721 flags.go:64] FLAG: --container-log-max-files="5"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639736 4721 flags.go:64] FLAG: --container-log-max-size="10Mi"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639745 4721 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639755 4721 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639764 4721 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639773 4721 flags.go:64] FLAG: --contention-profiling="false"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639782 4721 flags.go:64] FLAG: --cpu-cfs-quota="true"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639792 4721 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639801 4721 flags.go:64] FLAG: --cpu-manager-policy="none"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639810 4721 flags.go:64] FLAG: --cpu-manager-policy-options=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639822 4721 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639831 4721 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639840 4721 flags.go:64] FLAG: --enable-debugging-handlers="true"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639849 4721 flags.go:64] FLAG: --enable-load-reader="false"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639859 4721 flags.go:64] FLAG: --enable-server="true"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639869 4721 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639881 4721 flags.go:64] FLAG: --event-burst="100"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639891 4721 flags.go:64] FLAG: --event-qps="50"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639900 4721 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639909 4721 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639918 4721 flags.go:64] FLAG: --eviction-hard=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639929 4721 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639938 4721 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639947 4721 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639957 4721 flags.go:64] FLAG: --eviction-soft=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639966 4721 flags.go:64] FLAG: --eviction-soft-grace-period=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639975 4721 flags.go:64] FLAG: --exit-on-lock-contention="false"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639984 4721 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.639993 4721 flags.go:64] FLAG: --experimental-mounter-path=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640002 4721 flags.go:64] FLAG: --fail-cgroupv1="false"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640011 4721 flags.go:64] FLAG: --fail-swap-on="true"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640020 4721 flags.go:64] FLAG: --feature-gates=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640030 4721 flags.go:64] FLAG: --file-check-frequency="20s"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640039 4721 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640049 4721 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640059 4721 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640094 4721 flags.go:64] FLAG: --healthz-port="10248"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640105 4721 flags.go:64] FLAG: --help="false"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640114 4721 flags.go:64] FLAG: --hostname-override=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640123 4721 flags.go:64] FLAG: --housekeeping-interval="10s"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640133 4721 flags.go:64] FLAG: --http-check-frequency="20s"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640142 4721 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640151 4721 flags.go:64] FLAG: --image-credential-provider-config=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640160 4721 flags.go:64] FLAG: --image-gc-high-threshold="85"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640168 4721 flags.go:64] FLAG: --image-gc-low-threshold="80"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640180 4721 flags.go:64] FLAG: --image-service-endpoint=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640188 4721 flags.go:64] FLAG: --kernel-memcg-notification="false"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640197 4721 flags.go:64] FLAG: --kube-api-burst="100"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640206 4721 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640215 4721 flags.go:64] FLAG: --kube-api-qps="50"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640224 4721 flags.go:64] FLAG: --kube-reserved=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640233 4721 flags.go:64] FLAG: --kube-reserved-cgroup=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640242 4721 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640271 4721 flags.go:64] FLAG: --kubelet-cgroups=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640280 4721 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640289 4721 flags.go:64] FLAG: --lock-file=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640298 4721 flags.go:64] FLAG: --log-cadvisor-usage="false"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640306 4721 flags.go:64] FLAG: --log-flush-frequency="5s"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640317 4721 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640331 4721 flags.go:64] FLAG: --log-json-split-stream="false"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640340 4721 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640349 4721 flags.go:64] FLAG: --log-text-split-stream="false"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640358 4721 flags.go:64] FLAG: --logging-format="text"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640368 4721 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640378 4721 flags.go:64] FLAG: --make-iptables-util-chains="true"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640387 4721 flags.go:64] FLAG: --manifest-url=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640396 4721 flags.go:64] FLAG: --manifest-url-header=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640408 4721 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640417 4721 flags.go:64] FLAG: --max-open-files="1000000"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640428 4721 flags.go:64] FLAG: --max-pods="110"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640437 4721 flags.go:64] FLAG: --maximum-dead-containers="-1"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640447 4721 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640456 4721 flags.go:64] FLAG: --memory-manager-policy="None"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640465 4721 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640474 4721 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640483 4721 flags.go:64] FLAG: --node-ip="192.168.126.11"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640493 4721 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640515 4721 flags.go:64] FLAG: --node-status-max-images="50"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640525 4721 flags.go:64] FLAG: --node-status-update-frequency="10s"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640534 4721 flags.go:64] FLAG: --oom-score-adj="-999"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640543 4721 flags.go:64] FLAG: --pod-cidr=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640551 4721 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640567 4721 flags.go:64] FLAG: --pod-manifest-path=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640576 4721 flags.go:64] FLAG: --pod-max-pids="-1"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640585 4721 flags.go:64] FLAG: --pods-per-core="0"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640594 4721 flags.go:64] FLAG: --port="10250"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640604 4721 flags.go:64] FLAG: --protect-kernel-defaults="false"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640613 4721 flags.go:64] FLAG: --provider-id=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640622 4721 flags.go:64] FLAG: --qos-reserved=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640632 4721 flags.go:64] FLAG: --read-only-port="10255"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640642 4721 flags.go:64] FLAG: --register-node="true"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640657 4721 flags.go:64] FLAG: --register-schedulable="true"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640667 4721 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640689 4721 flags.go:64] FLAG: --registry-burst="10"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640701 4721 flags.go:64] FLAG: --registry-qps="5"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640712 4721 flags.go:64] FLAG: --reserved-cpus=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640721 4721 flags.go:64] FLAG: --reserved-memory=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640734 4721 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640743 4721 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640753 4721 flags.go:64] FLAG: --rotate-certificates="false"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640762 4721 flags.go:64] FLAG: --rotate-server-certificates="false"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640771 4721 flags.go:64] FLAG: --runonce="false"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640780 4721 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640790 4721 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640800 4721 flags.go:64] FLAG: --seccomp-default="false"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640809 4721 flags.go:64] FLAG: --serialize-image-pulls="true"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640818 4721 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640828 4721 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640838 4721 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640848 4721 flags.go:64] FLAG: --storage-driver-password="root"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640857 4721 flags.go:64] FLAG: --storage-driver-secure="false"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640866 4721 flags.go:64] FLAG: --storage-driver-table="stats"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640875 4721 flags.go:64] FLAG: --storage-driver-user="root"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640884 4721 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640894 4721 flags.go:64] FLAG: --sync-frequency="1m0s"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640903 4721 flags.go:64] FLAG: --system-cgroups=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640912 4721 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640927 4721 flags.go:64] FLAG: --system-reserved-cgroup=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640936 4721 flags.go:64] FLAG: --tls-cert-file=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640945 4721 flags.go:64] FLAG: --tls-cipher-suites="[]"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640956 4721 flags.go:64] FLAG: --tls-min-version=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640965 4721 flags.go:64] FLAG: --tls-private-key-file=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640975 4721 flags.go:64] FLAG: --topology-manager-policy="none"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640987 4721 flags.go:64] FLAG: --topology-manager-policy-options=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.640997 4721 flags.go:64] FLAG: --topology-manager-scope="container"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.641007 4721 flags.go:64] FLAG: --v="2"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.641020 4721 flags.go:64] FLAG: --version="false"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.641033 4721 flags.go:64] FLAG: --vmodule=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.641044 4721 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.641306 4721 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646509 4721 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646563 4721 feature_gate.go:330] unrecognized feature gate: Example
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646572 4721 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646581 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646591 4721 feature_gate.go:330] unrecognized feature gate: PinnedImages
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646599 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646609 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646620 4721 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646631 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646640 4721 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646648 4721 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646657 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646664 4721 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646672 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646680 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646688 4721 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646695 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646704 4721 feature_gate.go:330] unrecognized feature gate: SignatureStores
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646711 4721 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646719 4721 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646727 4721 feature_gate.go:330] unrecognized feature gate: NewOLM
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646734 4721 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646742 4721 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646750 4721 feature_gate.go:330] unrecognized feature gate: OVNObservability
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646761 4721 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646774 4721 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646784 4721 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646792 4721 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646801 4721 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646809 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646819 4721 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646829 4721 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646838 4721 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646845 4721 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646853 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646861 4721 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646869 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646876 4721 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646884 4721 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646894 4721 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646904 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646913 4721 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646922 4721 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646933 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646941 4721 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646951 4721 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646960 4721 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646971 4721 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646981 4721 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.646991 4721 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647000 4721 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647010 4721 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647018 4721 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647026 4721 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647035 4721 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647043 4721 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647051 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647086 4721 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647095 4721 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647103 4721 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647111 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647120 4721 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647127 4721 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647136 4721 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647144 4721 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647151 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647159 4721 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647169 4721 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647177 4721 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647185 4721 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.647193 4721 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.647449 4721 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.663445 4721 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.663503 4721 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663636 4721 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663662 4721 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663673 4721 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663681 4721 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663689 4721 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663697 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663705 4721 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663713 4721 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663721 4721 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663730 4721 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663737 4721 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663745 4721 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663753 4721 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663761 4721 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663769 4721 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663777 4721 feature_gate.go:330] unrecognized feature gate: OVNObservability
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663785 4721 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663792 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663800 4721 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663808 4721 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663819 4721 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663833 4721 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663843 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663852 4721 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663860 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663868 4721 feature_gate.go:330] unrecognized feature gate: NewOLM
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663876 4721 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663887 4721 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663896 4721 feature_gate.go:330] unrecognized feature gate: PinnedImages
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663904 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663912 4721 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663919 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663927 4721 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663935 4721 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663944 4721 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663952 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663960 4721 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663968 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663976 4721 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663984 4721 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.663992 4721 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664000 4721 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664007 4721 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664015 4721 feature_gate.go:330] unrecognized feature gate: Example
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664024 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664033 4721 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664043 4721 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664052 4721 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664060 4721 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664092 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664100 4721 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664108 4721 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664116 4721 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664125 4721 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664133 4721 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664141 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664149 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664157 4721 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664164 4721 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664173 4721 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664184 4721 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664193 4721 feature_gate.go:330] unrecognized feature gate: SignatureStores
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664202 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664212 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664222 4721 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664233 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664242 4721 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664253 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664263 4721 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664272 4721 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664284 4721 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.664302 4721 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664561 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664579 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664588 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664598 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664606 4721 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664614 4721 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664622 4721 feature_gate.go:330] unrecognized feature gate: NewOLM
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664631 4721 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664641 4721 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664649 4721 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664657 4721 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664664 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664672 4721 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664680 4721 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664688 4721 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664695 4721 feature_gate.go:330] unrecognized feature gate: PinnedImages
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664703 4721 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664711 4721 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664720 4721 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664727 4721 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664735 4721 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664742 4721 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664750 4721 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664758 4721 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664766 4721 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664773 4721 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664781 4721 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664788 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664798 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664806 4721 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664814 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664821 4721 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664830 4721 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664837 4721 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664846 4721 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664854 4721 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664862 4721 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664870 4721 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664878 4721 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664886 4721 feature_gate.go:330] unrecognized feature gate: SignatureStores
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664896 4721 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664906 4721 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664915 4721 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664923 4721 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664931 4721 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664939 4721 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664946 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664954 4721 feature_gate.go:330] unrecognized feature gate: Example
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664965 4721 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664974 4721 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664983 4721 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.664991 4721 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.665000 4721 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.665007 4721 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.665015 4721 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.665023 4721 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.665033 4721 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.665043 4721 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.665053 4721 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.665092 4721 feature_gate.go:330] unrecognized feature gate: OVNObservability
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.665103 4721 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.665112 4721 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.665120 4721 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.665129 4721 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.665137 4721 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.665147 4721 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.665155 4721 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.665165 4721 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.665173 4721 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.665181 4721 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.665190 4721 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.665203 4721 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.665532 4721 server.go:940] "Client rotation is on, will bootstrap in background"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.673494 4721 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.673638 4721 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
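Annotation on the startup records above: the kubelet prints every parsed command-line flag through flags.go:64 as FLAG: --name="value", warns about each OpenShift-only gate it does not recognize through feature_gate.go:330, and prints the resolved gate map through feature_gate.go:386. A minimal Python sketch, assuming only those three line formats and one journal entry per line (the kubelet.log path is hypothetical), that summarizes a capture like this one:

```python
import re
from collections import Counter

# Line formats as they appear in the capture above (assumed stable):
#   flags.go:64] FLAG: --name="value"
#   feature_gate.go:330] unrecognized feature gate: Name
#   feature_gate.go:386] feature gates: {map[Name:true Name2:false ...]}
FLAG_RE = re.compile(r'flags\.go:\d+\] FLAG: --([\w-]+)="(.*)"')
UNRECOGNIZED_RE = re.compile(r'unrecognized feature gate: (\w+)')
GATES_RE = re.compile(r'feature gates: \{map\[(.*)\]\}')

def summarize(path: str) -> None:
    flags: dict[str, str] = {}
    unrecognized: Counter[str] = Counter()
    gates: dict[str, bool] = {}
    with open(path, encoding="utf-8", errors="replace") as f:
        for line in f:
            if m := FLAG_RE.search(line):
                flags[m.group(1)] = m.group(2)
            if m := UNRECOGNIZED_RE.search(line):
                unrecognized[m.group(1)] += 1
            if m := GATES_RE.search(line):
                # "Name:true Name2:false ..." -> {"Name": True, ...};
                # keeps the last map printed, i.e. the final resolved set.
                for pair in m.group(1).split():
                    name, _, value = pair.partition(":")
                    gates[name] = (value == "true")
    print(f"{len(flags)} flags parsed, e.g. --node-ip={flags.get('node-ip')}")
    print(f"{len(unrecognized)} distinct unrecognized feature gates "
          f"({sum(unrecognized.values())} warnings total)")
    print(f"resolved feature gates: {gates}")

summarize("kubelet.log")  # hypothetical path to a capture like this one
```

Run against this capture, the sketch would report, for example, --node-ip=192.168.126.11 and that each unrecognized gate is warned about once per parse pass, which is why the same gate names recur in blocks above.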
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.675573 4721 server.go:997] "Starting client certificate rotation"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.675622 4721 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.675870 4721 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-23 11:28:11.378098155 +0000 UTC
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.676021 4721 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 45h52m8.702081661s for next certificate rotation
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.720381 4721 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.724300 4721 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.753579 4721 log.go:25] "Validated CRI v1 runtime API"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.818986 4721 log.go:25] "Validated CRI v1 image API"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.821942 4721 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.831706 4721 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-11-21-13-31-49-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.831758 4721 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:41 fsType:tmpfs blockSize:0}]
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.870618 4721 manager.go:217] Machine: {Timestamp:2025-11-21 13:36:02.865880357 +0000 UTC m=+0.843771705 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:5536c4e5-1f28-40b2-9c33-e4b399b3bfd8 BootID:d6dd8ca9-77b6-40e9-b340-d759ee2aceeb Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42
Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:41 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:70:a3:31 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:70:a3:31 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:94:a7:92 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:7a:a0:70 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:22:dd:91 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:a1:ad:9d Speed:-1 Mtu:1496} {Name:eth10 MacAddress:a6:5a:26:93:4b:d7 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:5e:d4:38:68:ca:61 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: 
DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.871241 4721 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.871524 4721 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.873557 4721 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.873908 4721 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.873974 4721 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.874382 4721 topology_manager.go:138] "Creating topology manager with none policy"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.874402 4721 container_manager_linux.go:303] "Creating device plugin manager"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.875254 4721 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.875309 4721 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.876293 4721 state_mem.go:36] "Initialized new in-memory state store"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.876456 4721 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.880937 4721 kubelet.go:418] "Attempting to sync node with API server"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.880976 4721 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.881023 4721 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.881052 4721 kubelet.go:324] "Adding apiserver pod source"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.881106 4721 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.888619 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.888629 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:02 crc kubenswrapper[4721]: E1121 13:36:02.888765 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError"
Nov 21 13:36:02 crc kubenswrapper[4721]: E1121 13:36:02.888739 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.890448 4721 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.893164 4721 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
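
The certificate_manager.go lines earlier in this boot (expiration 2026-02-24, rotation deadline 2025-11-23, then "Waiting 45h52m...") reflect client-go's rotation policy of waking up at a jittered point roughly 70-90% of the way through the certificate's validity window. A Go sketch of that policy, not the exact client-go code, with an assumed one-year issue time since the log does not show NotBefore:

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// nextRotationDeadline picks a random point at 70-90% of the cert's validity,
// mirroring the jitter policy suggested by the log lines above.
func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
	validity := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(validity) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:52:08Z") // from the log
	notBefore := notAfter.AddDate(-1, 0, 0)                         // assumed one-year validity
	deadline := nextRotationDeadline(notBefore, notAfter)
	fmt.Printf("rotation deadline %s, waiting %s\n",
		deadline.UTC(), time.Until(deadline).Round(time.Second))
}
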
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.896860 4721 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.901282 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.901306 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.901317 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.901326 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.901368 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.901376 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.901382 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.901394 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.901405 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.901412 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.901422 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.901429 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.902603 4721 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.903394 4721 server.go:1280] "Started kubelet"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.905225 4721 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.905354 4721 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.905388 4721 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Nov 21 13:36:02 crc systemd[1]: Started Kubernetes Kubelet.
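
At this point the kubelet is up and serving on :10250 while every call to the apiserver at api-int.crc.testing:6443 still fails with "connection refused", which is expected this early in a cold boot, before the static-pod control plane has started. When replaying this kind of startup, a quick Go probe of the two endpoints named in the log can separate "apiserver not up yet" from "kubelet not listening" (addresses taken from the lines above; intended to run on the node itself):

package main

import (
	"crypto/tls"
	"fmt"
	"net"
	"time"
)

// probe reports whether a TCP connection to addr succeeds within 2s.
func probe(addr string) {
	conn, err := net.DialTimeout("tcp", addr, 2*time.Second)
	if err != nil {
		fmt.Printf("%s: %v\n", addr, err) // e.g. "connect: connection refused"
		return
	}
	conn.Close()
	fmt.Printf("%s: tcp reachable\n", addr)
}

func main() {
	probe("api-int.crc.testing:6443") // apiserver: refused until static pods start
	probe("127.0.0.1:10250")          // the kubelet's own serving port

	// :10250 serves TLS (the kubelet-server-current.pem loaded above); check
	// only that the handshake completes, skipping chain verification since
	// this is a debugging probe, not a client.
	if c, err := tls.Dial("tcp", "127.0.0.1:10250", &tls.Config{InsecureSkipVerify: true}); err == nil {
		fmt.Println("kubelet TLS handshake ok")
		c.Close()
	} else {
		fmt.Println("kubelet TLS handshake:", err)
	}
}
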
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.907040 4721 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.907875 4721 server.go:460] "Adding debug handlers to kubelet server"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.916134 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.916284 4721 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.917458 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 22:07:07.685690258 +0000 UTC
Nov 21 13:36:02 crc kubenswrapper[4721]: E1121 13:36:02.917703 4721 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.919620 4721 volume_manager.go:287] "The desired_state_of_world populator starts"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.919808 4721 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.919875 4721 volume_manager.go:289] "Starting Kubelet Volume Manager"
Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.922973 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:02 crc kubenswrapper[4721]: E1121 13:36:02.923131 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError"
Nov 21 13:36:02 crc kubenswrapper[4721]: E1121 13:36:02.923976 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" interval="200ms"
Nov 21 13:36:02 crc kubenswrapper[4721]: E1121 13:36:02.923407 4721 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.198:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187a090eb8c8f5d1 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-21 13:36:02.903340497 +0000 UTC m=+0.881231785,LastTimestamp:2025-11-21 13:36:02.903340497 +0000 UTC m=+0.881231785,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.926261 4721 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.926294 4721 factory.go:55] Registering systemd factory
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.926305 4721 factory.go:221] Registration of the systemd container factory successfully
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.926755 4721 factory.go:153] Registering CRI-O factory
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.926776 4721 factory.go:221] Registration of the crio container factory successfully
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.926802 4721 factory.go:103] Registering Raw factory
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.926823 4721 manager.go:1196] Started watching for new ooms in manager
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.927629 4721 manager.go:319] Starting recovery of all containers
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939332 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939399 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939415 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939428 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939440 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939450 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939461 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939471 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Nov 21 13:36:02 crc kubenswrapper[4721]: I1121
13:36:02.939484 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939493 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939507 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939517 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939526 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939540 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939552 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939562 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939575 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939583 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939593 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939601 4721 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939612 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939621 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939629 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939639 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939652 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939663 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939680 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939692 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939705 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939767 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939779 4721 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939789 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939801 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939812 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939823 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939834 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939843 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939854 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939865 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939875 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939886 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939901 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939910 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939921 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939930 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939939 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939948 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939959 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939972 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939984 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.939995 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940004 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940023 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940040 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940057 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940086 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940097 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940115 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940124 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940136 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940146 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940159 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940170 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940180 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940192 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940203 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940214 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940226 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940237 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940246 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940257 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940266 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940277 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940286 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940324 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940334 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940345 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940354 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940363 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940375 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940386 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940396 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940408 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940417 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940428 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940440 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" 
volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940454 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940465 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940476 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940490 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940502 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.940512 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.943157 4721 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.943217 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944259 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944292 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944314 4721 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944331 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944347 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944362 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944378 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944399 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944429 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944448 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944467 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944493 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944515 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944533 4721 reconstruct.go:130] "Volume is marked as uncertain 
and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944566 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944583 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944598 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944616 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944638 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944653 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944671 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944688 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944705 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944723 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944739 4721 reconstruct.go:130] "Volume is marked as uncertain and added into 
the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944755 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944780 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944797 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944828 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944844 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944860 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944878 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944895 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944911 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944927 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944943 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944959 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944974 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.944988 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945004 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945019 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945032 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945047 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945144 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945163 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945236 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945261 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945279 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945302 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945318 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945337 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945382 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945398 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945414 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945429 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945443 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945463 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945479 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945493 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945507 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945523 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945537 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945552 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945568 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945581 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945595 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945618 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945631 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945644 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945658 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945672 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945686 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945700 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945716 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945729 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945746 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945760 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945774 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945789 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945804 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" 
volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945818 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945833 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945849 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945865 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945884 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945901 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945916 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945933 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945947 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945964 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945979 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" 
volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.945997 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946015 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946029 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946043 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946059 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946093 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946109 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946125 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946141 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946155 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946174 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946189 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946204 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946222 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946238 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946253 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946271 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946286 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946326 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946340 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946357 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946371 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" 
volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946384 4721 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946398 4721 reconstruct.go:97] "Volume reconstruction finished" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.946408 4721 reconciler.go:26] "Reconciler: start to sync state" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.959229 4721 manager.go:324] Recovery completed Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.975984 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.977452 4721 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.978348 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.978408 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.978424 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.979522 4721 cpu_manager.go:225] "Starting CPU manager" policy="none" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.979551 4721 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.979574 4721 state_mem.go:36] "Initialized new in-memory state store" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.980235 4721 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.980313 4721 status_manager.go:217] "Starting to sync pod status with apiserver" Nov 21 13:36:02 crc kubenswrapper[4721]: I1121 13:36:02.980363 4721 kubelet.go:2335] "Starting kubelet main sync loop" Nov 21 13:36:02 crc kubenswrapper[4721]: E1121 13:36:02.980449 4721 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Nov 21 13:36:02 crc kubenswrapper[4721]: W1121 13:36:02.982658 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused Nov 21 13:36:02 crc kubenswrapper[4721]: E1121 13:36:02.982737 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError" Nov 21 13:36:03 crc kubenswrapper[4721]: E1121 13:36:03.018501 4721 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.036814 4721 policy_none.go:49] "None policy: Start" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.039364 4721 memory_manager.go:170] "Starting memorymanager" policy="None" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.039416 4721 state_mem.go:35] "Initializing new in-memory state store" Nov 21 13:36:03 crc kubenswrapper[4721]: E1121 13:36:03.080762 4721 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.115975 4721 manager.go:334] "Starting Device Plugin manager" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.116144 4721 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.116170 4721 server.go:79] "Starting device plugin registration server" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.116751 4721 eviction_manager.go:189] "Eviction manager: starting control loop" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.116779 4721 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.117121 4721 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.117262 4721 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.117284 4721 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Nov 21 13:36:03 crc kubenswrapper[4721]: E1121 13:36:03.125675 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" interval="400ms" Nov 21 13:36:03 crc kubenswrapper[4721]: E1121 13:36:03.131598 4721 eviction_manager.go:285] 
"Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.217446 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.218968 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.219011 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.219023 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.219054 4721 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 21 13:36:03 crc kubenswrapper[4721]: E1121 13:36:03.219694 4721 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.198:6443: connect: connection refused" node="crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.281647 4721 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.281811 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.283742 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.283808 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.283828 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.284059 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.284619 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.284732 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.285603 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.285649 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.285666 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.285918 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.286179 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.286235 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.287002 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.287042 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.287060 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.287479 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.287547 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.287571 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.287799 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.287843 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.287880 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.287913 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.288368 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.288452 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.289603 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.289656 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.289680 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.289919 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.289947 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.289978 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.289996 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.290102 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.290177 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.291190 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.291232 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.291250 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.291319 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.291359 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.291398 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.291452 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.291490 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.292655 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.292705 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.292730 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.350433 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.350501 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.350566 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.350615 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.350848 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.350928 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.350970 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: 
I1121 13:36:03.351002 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.351113 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.351187 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.351269 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.351351 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.351414 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.351482 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.351570 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.420377 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.422016 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.422114 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 
13:36:03.422137 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.422178 4721 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 21 13:36:03 crc kubenswrapper[4721]: E1121 13:36:03.422849 4721 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.198:6443: connect: connection refused" node="crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.453276 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.453357 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.453415 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.453464 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.453513 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.453505 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.453555 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.453594 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.453615 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.453534 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.453752 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.453872 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.453950 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.453955 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.453954 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.454104 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.454147 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.454102 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.454278 4721 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.454327 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.454351 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.454402 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.454441 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.454490 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.454498 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.454560 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.454617 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.454683 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.454695 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.454728 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 13:36:03 crc kubenswrapper[4721]: E1121 13:36:03.526617 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" interval="800ms"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.621056 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.633526 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.655741 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.677966 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.688503 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Nov 21 13:36:03 crc kubenswrapper[4721]: W1121 13:36:03.703341 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-31eecbfe7c79c96549e94bc890083b05a4cc3d8ee4fe5c5a01f5c2fbef9197c6 WatchSource:0}: Error finding container 31eecbfe7c79c96549e94bc890083b05a4cc3d8ee4fe5c5a01f5c2fbef9197c6: Status 404 returned error can't find the container with id 31eecbfe7c79c96549e94bc890083b05a4cc3d8ee4fe5c5a01f5c2fbef9197c6
Nov 21 13:36:03 crc kubenswrapper[4721]: W1121 13:36:03.704892 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-d310835f00b61a90ea409f6709999cea406d9dd03b91289f720e31d3b1990c4b WatchSource:0}: Error finding container d310835f00b61a90ea409f6709999cea406d9dd03b91289f720e31d3b1990c4b: Status 404 returned error can't find the container with id d310835f00b61a90ea409f6709999cea406d9dd03b91289f720e31d3b1990c4b
Nov 21 13:36:03 crc kubenswrapper[4721]: W1121 13:36:03.712793 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-7e15baac1ba00b055eb3a997e7458ba3d8e153ff99242b6dade60b8e573f5eea WatchSource:0}: Error finding container 7e15baac1ba00b055eb3a997e7458ba3d8e153ff99242b6dade60b8e573f5eea: Status 404 returned error can't find the container with id 7e15baac1ba00b055eb3a997e7458ba3d8e153ff99242b6dade60b8e573f5eea
Nov 21 13:36:03 crc kubenswrapper[4721]: W1121 13:36:03.714714 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-96a4784aeffa8f902730e13998dcda7d66b75dcb8ceb5d16f666e9e3fe72607f WatchSource:0}: Error finding container 96a4784aeffa8f902730e13998dcda7d66b75dcb8ceb5d16f666e9e3fe72607f: Status 404 returned error can't find the container with id 96a4784aeffa8f902730e13998dcda7d66b75dcb8ceb5d16f666e9e3fe72607f
Nov 21 13:36:03 crc kubenswrapper[4721]: W1121 13:36:03.715440 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-c749b7c08f5f3a62f7371085094cdf819d232c2c51e49624a73bbf6450e0b9da WatchSource:0}: Error finding container c749b7c08f5f3a62f7371085094cdf819d232c2c51e49624a73bbf6450e0b9da: Status 404 returned error can't find the container with id c749b7c08f5f3a62f7371085094cdf819d232c2c51e49624a73bbf6450e0b9da
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.823317 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.825429 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.825508 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.825528 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.825579 4721 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Nov 21 13:36:03 crc kubenswrapper[4721]: E1121 13:36:03.826392 4721 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.198:6443: connect: connection refused" node="crc"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.906755 4721 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.921616 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 12:15:05.199824888 +0000 UTC
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.921688 4721 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 502h39m1.278138488s for next certificate rotation
Nov 21 13:36:03 crc kubenswrapper[4721]: W1121 13:36:03.936422 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:03 crc kubenswrapper[4721]: E1121 13:36:03.936519 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError"
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.987377 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"96a4784aeffa8f902730e13998dcda7d66b75dcb8ceb5d16f666e9e3fe72607f"}
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.988658 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c749b7c08f5f3a62f7371085094cdf819d232c2c51e49624a73bbf6450e0b9da"}
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.989904 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7e15baac1ba00b055eb3a997e7458ba3d8e153ff99242b6dade60b8e573f5eea"}
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.991641 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d310835f00b61a90ea409f6709999cea406d9dd03b91289f720e31d3b1990c4b"}
Nov 21 13:36:03 crc kubenswrapper[4721]: I1121 13:36:03.993179 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"31eecbfe7c79c96549e94bc890083b05a4cc3d8ee4fe5c5a01f5c2fbef9197c6"}
Nov 21 13:36:04 crc kubenswrapper[4721]: W1121 13:36:04.101581 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:04 crc kubenswrapper[4721]: E1121 13:36:04.101703 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError"
Nov 21 13:36:04 crc kubenswrapper[4721]: W1121 13:36:04.107845 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:04 crc kubenswrapper[4721]: E1121 13:36:04.107927 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError"
Nov 21 13:36:04 crc kubenswrapper[4721]: W1121 13:36:04.198567 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:04 crc kubenswrapper[4721]: E1121 13:36:04.198745 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError"
Nov 21 13:36:04 crc kubenswrapper[4721]: E1121 13:36:04.327816 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" interval="1.6s"
Nov 21 13:36:04 crc kubenswrapper[4721]: I1121 13:36:04.627389 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:04 crc kubenswrapper[4721]: I1121 13:36:04.629597 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:04 crc kubenswrapper[4721]: I1121 13:36:04.629647 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:04 crc kubenswrapper[4721]: I1121 13:36:04.629661 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:04 crc kubenswrapper[4721]: I1121 13:36:04.629691 4721 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Nov 21 13:36:04 crc kubenswrapper[4721]: E1121 13:36:04.630318 4721 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.198:6443: connect: connection refused" node="crc"
Nov 21 13:36:04 crc kubenswrapper[4721]: I1121 13:36:04.907266 4721 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:04 crc kubenswrapper[4721]: I1121 13:36:04.997741 4721 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773" exitCode=0
Nov 21 13:36:04 crc kubenswrapper[4721]: I1121 13:36:04.997837 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773"}
Nov 21 13:36:04 crc kubenswrapper[4721]: I1121 13:36:04.997948 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:04 crc kubenswrapper[4721]: I1121 13:36:04.999530 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:04 crc kubenswrapper[4721]: I1121 13:36:04.999595 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:04 crc kubenswrapper[4721]: I1121 13:36:04.999616 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.000649 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae"}
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.000680 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c"}
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.003304 4721 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760" exitCode=0
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.003395 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760"}
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.003443 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.004943 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.004965 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.004974 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.009666 4721 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="1ed71834068b6ca150bdaacb41a8da68bfcb3e66978d8435a1e960c180541cfb" exitCode=0
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.009953 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.009958 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"1ed71834068b6ca150bdaacb41a8da68bfcb3e66978d8435a1e960c180541cfb"}
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.009957 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.011445 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.011488 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.011503 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.011480 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.011619 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.011649 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.012170 4721 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="f18b0646fc8b40fc42cda80064f163d7bcd20e97b02b12e956d03754af85263d" exitCode=0
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.012218 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"f18b0646fc8b40fc42cda80064f163d7bcd20e97b02b12e956d03754af85263d"}
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.012263 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.013217 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.013257 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.013270 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:05 crc kubenswrapper[4721]: I1121 13:36:05.907418 4721 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:05 crc kubenswrapper[4721]: E1121 13:36:05.928932 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" interval="3.2s"
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.018420 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"dd8a7ea0eb3f0e2449376f55f6277cbf250e1fe9b12334ae10863a67403e1964"}
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.022525 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355"}
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.022605 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd"}
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.025181 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181"}
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.029020 4721 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="3c129bdaf147114762b656312c890fd69d53beb8c9018e34960e79f61b387d52" exitCode=0
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.029136 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"3c129bdaf147114762b656312c890fd69d53beb8c9018e34960e79f61b387d52"}
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.029278 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.031199 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.031280 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.031299 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.032677 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"785574da7816b494bfa29c702e49fb8d054908c7643b58b9cab04443d7f4044c"}
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.032750 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.034465 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.034504 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.034516 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:06 crc kubenswrapper[4721]: W1121 13:36:06.053984 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:06 crc kubenswrapper[4721]: E1121 13:36:06.054158 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError"
Nov 21 13:36:06 crc kubenswrapper[4721]: W1121 13:36:06.096508 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:06 crc kubenswrapper[4721]: E1121 13:36:06.096629 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError"
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.231345 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.232795 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.232841 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.232854 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.232882 4721 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Nov 21 13:36:06 crc kubenswrapper[4721]: E1121 13:36:06.233415 4721 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.198:6443: connect: connection refused" node="crc"
Nov 21 13:36:06 crc kubenswrapper[4721]: W1121 13:36:06.688605 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:06 crc kubenswrapper[4721]: E1121 13:36:06.688800 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError"
Nov 21 13:36:06 crc kubenswrapper[4721]: I1121 13:36:06.906178 4721 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:07 crc kubenswrapper[4721]: I1121 13:36:07.038633 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"24fa51f85d2dad25dafaa1bc01b6fa4d04e38200dceb5a57f28b5fcac97dc2e9"}
Nov 21 13:36:07 crc kubenswrapper[4721]: I1121 13:36:07.041900 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335"}
Nov 21 13:36:07 crc kubenswrapper[4721]: I1121 13:36:07.044365 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3e5f1e50f8ab43e2d323d076a3c84e82f8130a8c91f55840c5b92554b60b6fec"}
Nov 21 13:36:07 crc kubenswrapper[4721]: I1121 13:36:07.044471 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:07 crc kubenswrapper[4721]: I1121 13:36:07.044479 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:07 crc kubenswrapper[4721]: I1121 13:36:07.045818 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:07 crc kubenswrapper[4721]: I1121 13:36:07.045849 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:07 crc kubenswrapper[4721]: I1121 13:36:07.045861 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:07 crc kubenswrapper[4721]: I1121 13:36:07.045904 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:07 crc kubenswrapper[4721]: I1121 13:36:07.045921 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:07 crc kubenswrapper[4721]: I1121 13:36:07.045930 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:07 crc kubenswrapper[4721]: W1121 13:36:07.393010 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:07 crc kubenswrapper[4721]: E1121 13:36:07.393227 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError"
Nov 21 13:36:07 crc kubenswrapper[4721]: E1121 13:36:07.573572 4721 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.198:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187a090eb8c8f5d1 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-21 13:36:02.903340497 +0000 UTC m=+0.881231785,LastTimestamp:2025-11-21 13:36:02.903340497 +0000 UTC m=+0.881231785,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Nov 21 13:36:07 crc kubenswrapper[4721]: I1121 13:36:07.907286 4721 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:08 crc kubenswrapper[4721]: I1121 13:36:08.047385 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:08 crc kubenswrapper[4721]: I1121 13:36:08.048766 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:08 crc kubenswrapper[4721]: I1121 13:36:08.048803 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:08 crc kubenswrapper[4721]: I1121 13:36:08.048815 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:08 crc kubenswrapper[4721]: I1121 13:36:08.907470 4721 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:08 crc kubenswrapper[4721]: I1121 13:36:08.962271 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 21 13:36:08 crc kubenswrapper[4721]: I1121 13:36:08.962634 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:08 crc kubenswrapper[4721]: I1121 13:36:08.963781 4721 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Nov 21 13:36:08 crc kubenswrapper[4721]: I1121 13:36:08.963883 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Nov 21 13:36:08 crc kubenswrapper[4721]: I1121 13:36:08.964315 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:08 crc kubenswrapper[4721]: I1121 13:36:08.964362 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:08 crc kubenswrapper[4721]: I1121 13:36:08.964374 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.057255 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.057255 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c31bd83ed26e48907b284aba52462ec0e17a41af0ab24b310046c41d73234689"}
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.058709 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.058760 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.058782 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.061993 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85"}
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.065961 4721 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="3e5f1e50f8ab43e2d323d076a3c84e82f8130a8c91f55840c5b92554b60b6fec" exitCode=0
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.066023 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"3e5f1e50f8ab43e2d323d076a3c84e82f8130a8c91f55840c5b92554b60b6fec"}
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.066297 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.067633 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.067689 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.067712 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:09 crc kubenswrapper[4721]: E1121 13:36:09.130947 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" interval="6.4s"
Nov 21 13:36:09 crc kubenswrapper[4721]: W1121 13:36:09.279230 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:09 crc kubenswrapper[4721]: E1121 13:36:09.279584 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError"
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.434049 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.436264 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.436394 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.436418 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.436472 4721 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Nov 21 13:36:09 crc kubenswrapper[4721]: E1121 13:36:09.437386 4721 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.198:6443: connect: connection refused" node="crc"
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.621210 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.621436 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.623293 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.623358 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.623372 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:09 crc kubenswrapper[4721]: I1121 13:36:09.906928 4721 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:10 crc kubenswrapper[4721]: W1121 13:36:10.019965 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:10 crc kubenswrapper[4721]: E1121 13:36:10.020180 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError"
Nov 21 13:36:10 crc kubenswrapper[4721]: I1121 13:36:10.075342 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"1c73455ff33c34c0c280359a02c410ae10556c78c85d80e125a705ae3c75c712"}
Nov 21 13:36:10 crc kubenswrapper[4721]: I1121 13:36:10.081862 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8"}
Nov 21 13:36:10 crc kubenswrapper[4721]: I1121 13:36:10.081957 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:10 crc kubenswrapper[4721]: I1121 13:36:10.082124 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Nov 21 13:36:10 crc kubenswrapper[4721]: I1121 13:36:10.083193 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:10 crc kubenswrapper[4721]: I1121 13:36:10.083250 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:10 crc kubenswrapper[4721]: I1121 13:36:10.083268 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:10 crc kubenswrapper[4721]: I1121 13:36:10.906705 4721 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:11 crc kubenswrapper[4721]: I1121 13:36:11.086706 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"052ca9dcbf932801acf450d9d3529036a75e54275100a761e94b2ab19113a958"}
Nov 21 13:36:11 crc kubenswrapper[4721]: I1121 13:36:11.086748 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"db7877cdbd3d4955261696847f4a1d5d755fffe418891bec46f172d3adb00fc3"}
Nov 21 13:36:11 crc kubenswrapper[4721]: I1121 13:36:11.086758 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a7864d7d180cdf0f73fb6446e4f0aad20dab3423dc0a8e4757ce36b6ff00e473"}
Nov 21 13:36:11 crc kubenswrapper[4721]: I1121 13:36:11.089158 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:11 crc kubenswrapper[4721]: I1121 13:36:11.089205 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:11 crc kubenswrapper[4721]: I1121 13:36:11.089348 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c2fa573c1ffad17ab07f0203a7ba9a473087f0d1aaf4bc94d1a1005546e8bb80"}
Nov 21 13:36:11 crc kubenswrapper[4721]: I1121 13:36:11.089866 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:11 crc kubenswrapper[4721]: I1121 13:36:11.089894 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:11 crc kubenswrapper[4721]: I1121 13:36:11.089902 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:11 crc kubenswrapper[4721]: I1121 13:36:11.090315 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:11 crc kubenswrapper[4721]: I1121 13:36:11.090333 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:11 crc kubenswrapper[4721]: I1121 13:36:11.090341 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:11 crc kubenswrapper[4721]: I1121 13:36:11.182141 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 21 13:36:11 crc kubenswrapper[4721]: I1121 13:36:11.182444 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:11 crc kubenswrapper[4721]: I1121 13:36:11.184852 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:11 crc kubenswrapper[4721]: I1121 13:36:11.184908 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:11 crc kubenswrapper[4721]: I1121 13:36:11.184919 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:11 crc kubenswrapper[4721]: I1121 13:36:11.907194 4721 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Nov 21 13:36:12 crc kubenswrapper[4721]: I1121 13:36:12.093486 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Nov 21 13:36:12 crc kubenswrapper[4721]: I1121 13:36:12.095567 4721 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c2fa573c1ffad17ab07f0203a7ba9a473087f0d1aaf4bc94d1a1005546e8bb80" exitCode=255
Nov 21 13:36:12 crc kubenswrapper[4721]: I1121 13:36:12.095637 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"c2fa573c1ffad17ab07f0203a7ba9a473087f0d1aaf4bc94d1a1005546e8bb80"}
Nov 21 13:36:12 crc kubenswrapper[4721]: I1121 13:36:12.095798 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:12 crc kubenswrapper[4721]: I1121 13:36:12.097277 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:12 crc kubenswrapper[4721]: I1121 13:36:12.097336 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:12 crc kubenswrapper[4721]: I1121 13:36:12.097356 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:12 crc kubenswrapper[4721]: I1121 13:36:12.098371 4721 scope.go:117] "RemoveContainer" containerID="c2fa573c1ffad17ab07f0203a7ba9a473087f0d1aaf4bc94d1a1005546e8bb80"
Nov 21 13:36:12 crc kubenswrapper[4721]: I1121 13:36:12.104871 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ab051797c42a296d08ce3b040da622a044f8438ff4533779139f753e886a95b2"}
Nov 21 13:36:12 crc kubenswrapper[4721]: I1121 13:36:12.104976 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:12 crc kubenswrapper[4721]: I1121 13:36:12.106174 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:12 crc kubenswrapper[4721]: I1121 13:36:12.106249 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:12 crc kubenswrapper[4721]: I1121 13:36:12.106263 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:12 crc kubenswrapper[4721]: I1121 13:36:12.311983 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 13:36:13 crc kubenswrapper[4721]: I1121 13:36:13.111844 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Nov 21 13:36:13 crc kubenswrapper[4721]: I1121 13:36:13.115355 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d"}
Nov 21 13:36:13 crc kubenswrapper[4721]: I1121 13:36:13.115439 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:13 crc kubenswrapper[4721]: I1121 13:36:13.115660 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:13 crc kubenswrapper[4721]: I1121 13:36:13.116638 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:13 crc kubenswrapper[4721]: I1121 13:36:13.116687 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:13 crc kubenswrapper[4721]: I1121 13:36:13.116705 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:13 crc kubenswrapper[4721]: I1121 13:36:13.116951 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:13 crc kubenswrapper[4721]: I1121 13:36:13.117007 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:13 crc kubenswrapper[4721]: I1121 13:36:13.117028 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:13 crc kubenswrapper[4721]: E1121 13:36:13.132725 4721 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Nov 21 13:36:14 crc kubenswrapper[4721]: I1121 13:36:14.104833 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 13:36:14 crc kubenswrapper[4721]: I1121 13:36:14.117649 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:14 crc kubenswrapper[4721]: I1121 13:36:14.117812 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 13:36:14 crc kubenswrapper[4721]: I1121 13:36:14.119044 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:14 crc kubenswrapper[4721]: I1121 13:36:14.119174 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:14 crc kubenswrapper[4721]: I1121 13:36:14.119195 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:14 crc kubenswrapper[4721]: I1121 13:36:14.229945 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Nov 21 13:36:14 crc kubenswrapper[4721]: I1121 13:36:14.230335 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:14 crc kubenswrapper[4721]: I1121 13:36:14.232314 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:14 crc kubenswrapper[4721]: I1121 13:36:14.232390 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:14 crc kubenswrapper[4721]: I1121 13:36:14.232409 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:14 crc kubenswrapper[4721]: I1121 13:36:14.253286 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Nov 21 13:36:14 crc kubenswrapper[4721]: I1121 13:36:14.473671 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 21 13:36:14 crc kubenswrapper[4721]: I1121 13:36:14.473895 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:14 crc kubenswrapper[4721]: I1121 13:36:14.475747 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:14 crc kubenswrapper[4721]: I1121 13:36:14.475867 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:14 crc kubenswrapper[4721]: I1121 13:36:14.475891 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:15 crc kubenswrapper[4721]: I1121 13:36:15.120683 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:15 crc kubenswrapper[4721]: I1121 13:36:15.120732 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:15 crc kubenswrapper[4721]: I1121 13:36:15.122304 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:15 crc kubenswrapper[4721]: I1121 13:36:15.122351 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:15 crc kubenswrapper[4721]: I1121 13:36:15.122370 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:15 crc kubenswrapper[4721]: I1121 13:36:15.122428 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:15 crc kubenswrapper[4721]: I1121 13:36:15.122473 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:15 crc kubenswrapper[4721]: I1121 13:36:15.122492 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:15 crc kubenswrapper[4721]: I1121 13:36:15.837605 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:15 crc kubenswrapper[4721]: I1121 13:36:15.839522 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:15 crc kubenswrapper[4721]: I1121 13:36:15.839566 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:15 crc kubenswrapper[4721]: I1121 13:36:15.839578 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:15 crc kubenswrapper[4721]: I1121 13:36:15.839602 4721 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Nov 21 13:36:16 crc kubenswrapper[4721]: I1121 13:36:16.067145 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 13:36:16 crc kubenswrapper[4721]: I1121 13:36:16.123145 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:16 crc kubenswrapper[4721]: I1121 13:36:16.124482 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:16 crc kubenswrapper[4721]: I1121 13:36:16.124523 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:16 crc kubenswrapper[4721]: I1121 13:36:16.124536 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:17 crc kubenswrapper[4721]: I1121 13:36:17.473987 4721 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Nov 21 13:36:17 crc kubenswrapper[4721]: I1121 13:36:17.474154 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Nov 21 13:36:18 crc kubenswrapper[4721]: I1121 13:36:18.991745 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 21 13:36:18 crc kubenswrapper[4721]: I1121 13:36:18.991958 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:18 crc kubenswrapper[4721]: I1121 13:36:18.993794 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:18 crc kubenswrapper[4721]: I1121 13:36:18.993877 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:18 crc kubenswrapper[4721]: I1121 13:36:18.993902 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:18 crc kubenswrapper[4721]: I1121 13:36:18.997642 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 21 13:36:19 crc kubenswrapper[4721]: I1121 13:36:19.132148 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 13:36:19 crc kubenswrapper[4721]: I1121 13:36:19.139374 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:19 crc kubenswrapper[4721]: I1121 13:36:19.139457 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:19 crc kubenswrapper[4721]: I1121 13:36:19.139475 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:22 crc kubenswrapper[4721]: I1121 13:36:22.313220 4721 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Nov 21 13:36:22 crc kubenswrapper[4721]: I1121 13:36:22.313285 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Nov 21 13:36:22 crc kubenswrapper[4721]: W1121 13:36:22.373775 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout
Nov 21 13:36:22 crc kubenswrapper[4721]: I1121 13:36:22.373880 4721 trace.go:236] Trace[8126466]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (21-Nov-2025 13:36:12.372) (total time: 10001ms):
Nov 21 13:36:22 crc kubenswrapper[4721]: Trace[8126466]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (13:36:22.373)
Nov 21 13:36:22 crc kubenswrapper[4721]: Trace[8126466]: [10.00166026s] [10.00166026s] END
Nov 21 13:36:22 crc kubenswrapper[4721]: E1121 13:36:22.373906 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Nov 21 13:36:22 crc kubenswrapper[4721]: W1121 13:36:22.552047 4721 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout
Nov 21 13:36:22 crc kubenswrapper[4721]: I1121 13:36:22.552205 4721 trace.go:236] Trace[535008662]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (21-Nov-2025 13:36:12.549) (total time: 10002ms):
Nov 21 13:36:22 crc kubenswrapper[4721]: Trace[535008662]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10002ms (13:36:22.552)
Nov 21 13:36:22 crc kubenswrapper[4721]: Trace[535008662]: [10.002578215s] [10.002578215s] END
Nov 21 13:36:22 crc kubenswrapper[4721]: E1121 13:36:22.552238 4721 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Nov 21 13:36:22 crc kubenswrapper[4721]: I1121 13:36:22.908314 4721 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Nov 21 13:36:23 crc kubenswrapper[4721]: E1121 13:36:23.133073 4721 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Nov 21 13:36:23 crc kubenswrapper[4721]: I1121 13:36:23.304382 4721 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Nov 21 13:36:23 crc kubenswrapper[4721]: I1121 13:36:23.304481 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Nov 21 13:36:23 crc kubenswrapper[4721]: I1121 13:36:23.312576 4721 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Nov 21 13:36:23 crc kubenswrapper[4721]: I1121 13:36:23.312738 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Nov 21 13:36:24 crc kubenswrapper[4721]: I1121 13:36:24.110865 4721 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Nov 21 13:36:24 crc kubenswrapper[4721]: [+]log ok
Nov 21 13:36:24 crc kubenswrapper[4721]: [+]etcd ok
Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/openshift.io-api-request-count-filter ok
Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/openshift.io-startkubeinformers ok
Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok
Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok
Nov 21 13:36:24 crc
kubenswrapper[4721]: [+]poststarthook/start-apiserver-admission-initializer ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/generic-apiserver-start-informers ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/priority-and-fairness-config-consumer ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/priority-and-fairness-filter ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/storage-object-count-tracker-hook ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/start-apiextensions-informers ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/start-apiextensions-controllers ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/crd-informer-synced ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/start-system-namespaces-controller ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/start-cluster-authentication-info-controller ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/start-legacy-token-tracking-controller ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/start-service-ip-repair-controllers ok Nov 21 13:36:24 crc kubenswrapper[4721]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld Nov 21 13:36:24 crc kubenswrapper[4721]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/priority-and-fairness-config-producer ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/bootstrap-controller ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/aggregator-reload-proxy-client-cert ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/start-kube-aggregator-informers ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/apiservice-status-local-available-controller ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/apiservice-status-remote-available-controller ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/apiservice-registration-controller ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/apiservice-wait-for-first-sync ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/apiservice-discovery-controller ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/kube-apiserver-autoregistration ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]autoregister-completion ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/apiservice-openapi-controller ok Nov 21 13:36:24 crc kubenswrapper[4721]: [+]poststarthook/apiservice-openapiv3-controller ok Nov 21 13:36:24 crc kubenswrapper[4721]: livez check failed Nov 21 13:36:24 crc kubenswrapper[4721]: I1121 13:36:24.110960 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 21 13:36:24 crc kubenswrapper[4721]: I1121 13:36:24.353454 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Nov 21 13:36:24 crc kubenswrapper[4721]: I1121 13:36:24.353639 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 13:36:24 crc kubenswrapper[4721]: I1121 13:36:24.354844 
4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:24 crc kubenswrapper[4721]: I1121 13:36:24.354867 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:24 crc kubenswrapper[4721]: I1121 13:36:24.354877 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:24 crc kubenswrapper[4721]: I1121 13:36:24.368975 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Nov 21 13:36:25 crc kubenswrapper[4721]: I1121 13:36:25.150222 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 13:36:25 crc kubenswrapper[4721]: I1121 13:36:25.151864 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:25 crc kubenswrapper[4721]: I1121 13:36:25.151929 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:25 crc kubenswrapper[4721]: I1121 13:36:25.151947 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:27 crc kubenswrapper[4721]: I1121 13:36:27.474420 4721 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Nov 21 13:36:27 crc kubenswrapper[4721]: I1121 13:36:27.474510 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 21 13:36:28 crc kubenswrapper[4721]: E1121 13:36:28.310876 4721 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="7s" Nov 21 13:36:28 crc kubenswrapper[4721]: I1121 13:36:28.313228 4721 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Nov 21 13:36:28 crc kubenswrapper[4721]: I1121 13:36:28.330512 4721 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Nov 21 13:36:28 crc kubenswrapper[4721]: E1121 13:36:28.331343 4721 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Nov 21 13:36:28 crc kubenswrapper[4721]: I1121 13:36:28.339044 4721 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Nov 21 13:36:29 crc kubenswrapper[4721]: I1121 13:36:29.112662 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 21 13:36:29 crc kubenswrapper[4721]: I1121 13:36:29.112922 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 
13:36:29 crc kubenswrapper[4721]: I1121 13:36:29.113517 4721 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Nov 21 13:36:29 crc kubenswrapper[4721]: I1121 13:36:29.113612 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Nov 21 13:36:29 crc kubenswrapper[4721]: I1121 13:36:29.114629 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:29 crc kubenswrapper[4721]: I1121 13:36:29.114683 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:29 crc kubenswrapper[4721]: I1121 13:36:29.114696 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:29 crc kubenswrapper[4721]: I1121 13:36:29.120289 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 21 13:36:29 crc kubenswrapper[4721]: I1121 13:36:29.165997 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Nov 21 13:36:29 crc kubenswrapper[4721]: I1121 13:36:29.166716 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 21 13:36:29 crc kubenswrapper[4721]: I1121 13:36:29.169803 4721 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d" exitCode=255 Nov 21 13:36:29 crc kubenswrapper[4721]: I1121 13:36:29.169906 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d"} Nov 21 13:36:29 crc kubenswrapper[4721]: I1121 13:36:29.170263 4721 scope.go:117] "RemoveContainer" containerID="c2fa573c1ffad17ab07f0203a7ba9a473087f0d1aaf4bc94d1a1005546e8bb80" Nov 21 13:36:29 crc kubenswrapper[4721]: I1121 13:36:29.169933 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 13:36:29 crc kubenswrapper[4721]: I1121 13:36:29.171877 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:29 crc kubenswrapper[4721]: I1121 13:36:29.171920 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:29 crc kubenswrapper[4721]: I1121 13:36:29.171938 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:29 crc kubenswrapper[4721]: I1121 13:36:29.172968 4721 scope.go:117] "RemoveContainer" containerID="ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d" Nov 21 
13:36:29 crc kubenswrapper[4721]: E1121 13:36:29.173419 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Nov 21 13:36:30 crc kubenswrapper[4721]: I1121 13:36:30.176410 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Nov 21 13:36:30 crc kubenswrapper[4721]: I1121 13:36:30.180479 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 13:36:30 crc kubenswrapper[4721]: I1121 13:36:30.181732 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:30 crc kubenswrapper[4721]: I1121 13:36:30.181838 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:30 crc kubenswrapper[4721]: I1121 13:36:30.181918 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:30 crc kubenswrapper[4721]: I1121 13:36:30.182769 4721 scope.go:117] "RemoveContainer" containerID="ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d" Nov 21 13:36:30 crc kubenswrapper[4721]: E1121 13:36:30.183059 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Nov 21 13:36:31 crc kubenswrapper[4721]: I1121 13:36:31.166863 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 21 13:36:31 crc kubenswrapper[4721]: I1121 13:36:31.183815 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 13:36:31 crc kubenswrapper[4721]: I1121 13:36:31.185176 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:31 crc kubenswrapper[4721]: I1121 13:36:31.185418 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:31 crc kubenswrapper[4721]: I1121 13:36:31.185592 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:31 crc kubenswrapper[4721]: I1121 13:36:31.187431 4721 scope.go:117] "RemoveContainer" containerID="ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d" Nov 21 13:36:31 crc kubenswrapper[4721]: E1121 13:36:31.187955 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Nov 21 
13:36:32 crc kubenswrapper[4721]: I1121 13:36:32.591521 4721 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Nov 21 13:36:33 crc kubenswrapper[4721]: E1121 13:36:33.133195 4721 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.404445 4721 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.480946 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.486044 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.898458 4721 apiserver.go:52] "Watching apiserver" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.907215 4721 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.907985 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-machine-config-operator/machine-config-daemon-mstbk","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-operator/iptables-alerter-4ln5h","openshift-dns/node-resolver-tv49z","openshift-multus/multus-hp6ft","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-ovn-kubernetes/ovnkube-node-x6mh9","openshift-multus/multus-additional-cni-plugins-x4hgm"] Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.908558 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.908907 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:36:34 crc kubenswrapper[4721]: E1121 13:36:34.909036 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.909146 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.909479 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:36:34 crc kubenswrapper[4721]: E1121 13:36:34.909844 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.910468 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.911267 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:34 crc kubenswrapper[4721]: E1121 13:36:34.911533 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.911844 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.911989 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.912201 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.912204 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.912381 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.913019 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.913161 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.913408 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-tv49z" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.914506 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-hp6ft" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.914897 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.916646 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.917463 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.918462 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.919179 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.919371 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.920559 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.920790 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.921909 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.922489 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.922570 4721 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.922761 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.922997 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.922995 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.923154 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.923318 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.923416 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.923490 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.923634 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.923737 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.923930 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Nov 21 13:36:34 crc 
kubenswrapper[4721]: I1121 13:36:34.925292 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.925321 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.925340 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.925296 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.925929 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.926460 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.926810 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.957398 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978385 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978435 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978473 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978503 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978526 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978549 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978572 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978595 4721 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978622 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978649 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978670 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978692 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978716 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978736 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978759 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978781 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978804 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 21 13:36:34 
crc kubenswrapper[4721]: I1121 13:36:34.978830 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978878 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978900 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978928 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978952 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978974 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.978995 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979017 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979041 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979084 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " 
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979108 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979132 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979156 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979179 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979205 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979229 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979256 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979283 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979307 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979330 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979352 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979374 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979395 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979416 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979438 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979460 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979490 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979515 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979538 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979562 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979585 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979607 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979632 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979658 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979681 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979702 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979724 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979747 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979773 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979794 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979817 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979840 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979865 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979886 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979912 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979935 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979958 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979981 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980017 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980041 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980090 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980121 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980146 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980168 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980195 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980219 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980243 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980268 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") "
Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980291 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980317 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980341 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980366 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980393 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980419 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980446 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980533 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980560 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980582 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980609 4721 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980634 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980660 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980684 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980708 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980732 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980758 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980782 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980806 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980831 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 
13:36:34.980856 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980883 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980909 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980933 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980956 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981058 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981148 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981172 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981199 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981223 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981249 4721 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981272 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981296 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981324 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981349 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981376 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981403 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981426 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981556 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981583 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981609 4721 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981634 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981660 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981686 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981709 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981735 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981764 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981793 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981819 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981846 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 13:36:34 
crc kubenswrapper[4721]: I1121 13:36:34.981870 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981896 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981920 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981944 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981973 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981998 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982024 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982050 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982175 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982202 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 21 
13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982244 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982279 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982304 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982328 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982355 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982380 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982404 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982429 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982454 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982478 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 21 13:36:34 crc 
kubenswrapper[4721]: I1121 13:36:34.982504 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982528 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982555 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982582 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982607 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982631 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982654 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982678 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982702 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982729 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982754 4721 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982779 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982806 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982829 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982855 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982880 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982909 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982939 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982964 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982995 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983028 4721 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983054 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983103 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983127 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983152 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983176 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983204 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983232 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983260 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983285 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983314 4721 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983340 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983372 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983400 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983428 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983456 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983486 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983512 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983538 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983565 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" 
(UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983592 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983617 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983642 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983669 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983694 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983725 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983751 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983775 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983809 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983834 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod 
\"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983909 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-host-var-lib-cni-multus\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983951 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983984 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/fe3943c5-1349-4275-bc45-25db418c7488-rootfs\") pod \"machine-config-daemon-mstbk\" (UID: \"fe3943c5-1349-4275-bc45-25db418c7488\") " pod="openshift-machine-config-operator/machine-config-daemon-mstbk" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.984009 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-system-cni-dir\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.984031 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/553d849b-ea1f-436e-aa8f-190203f6ab6e-os-release\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985439 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-log-socket\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985478 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/553d849b-ea1f-436e-aa8f-190203f6ab6e-cnibin\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985504 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-hostroot\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985532 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: 
\"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-host-run-multus-certs\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985558 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-run-ovn-kubernetes\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985582 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-ovn-node-metrics-cert\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985604 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-multus-cni-dir\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985632 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-run-systemd\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985657 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985687 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fe3943c5-1349-4275-bc45-25db418c7488-proxy-tls\") pod \"machine-config-daemon-mstbk\" (UID: \"fe3943c5-1349-4275-bc45-25db418c7488\") " pod="openshift-machine-config-operator/machine-config-daemon-mstbk" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985719 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985748 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwv9f\" (UniqueName: \"kubernetes.io/projected/15670f4b-cd59-4040-b6d5-87a7cf51d63f-kube-api-access-pwv9f\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985773 4721 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzjfx\" (UniqueName: \"kubernetes.io/projected/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-kube-api-access-xzjfx\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985801 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cc54c\" (UniqueName: \"kubernetes.io/projected/4dd2533b-aa3f-44c0-aff1-3a13c844ad5a-kube-api-access-cc54c\") pod \"node-resolver-tv49z\" (UID: \"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\") " pod="openshift-dns/node-resolver-tv49z" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985825 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/553d849b-ea1f-436e-aa8f-190203f6ab6e-cni-binary-copy\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985849 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-slash\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985871 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-cni-bin\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985895 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-env-overrides\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985924 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985951 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-cnibin\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985976 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/553d849b-ea1f-436e-aa8f-190203f6ab6e-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " 
pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.986005 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-systemd-units\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.986031 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.986058 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.986108 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.986137 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/553d849b-ea1f-436e-aa8f-190203f6ab6e-tuning-conf-dir\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.986165 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-var-lib-openvswitch\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.986197 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fe3943c5-1349-4275-bc45-25db418c7488-mcd-auth-proxy-config\") pod \"machine-config-daemon-mstbk\" (UID: \"fe3943c5-1349-4275-bc45-25db418c7488\") " pod="openshift-machine-config-operator/machine-config-daemon-mstbk" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.986265 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-host-var-lib-kubelet\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.986307 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-kubelet\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.986341 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-multus-conf-dir\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.986375 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bhjb\" (UniqueName: \"kubernetes.io/projected/553d849b-ea1f-436e-aa8f-190203f6ab6e-kube-api-access-4bhjb\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.986418 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.986454 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-host-var-lib-cni-bin\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.986494 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.986531 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-ovnkube-config\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.986566 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/15670f4b-cd59-4040-b6d5-87a7cf51d63f-multus-daemon-config\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.986601 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/553d849b-ea1f-436e-aa8f-190203f6ab6e-system-cni-dir\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " 
pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.986735 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4dd2533b-aa3f-44c0-aff1-3a13c844ad5a-hosts-file\") pod \"node-resolver-tv49z\" (UID: \"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\") " pod="openshift-dns/node-resolver-tv49z" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.986771 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-multus-socket-dir-parent\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.987684 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-run-ovn\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.987721 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-node-log\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.987756 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.987798 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.987831 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sbp6\" (UniqueName: \"kubernetes.io/projected/fe3943c5-1349-4275-bc45-25db418c7488-kube-api-access-9sbp6\") pod \"machine-config-daemon-mstbk\" (UID: \"fe3943c5-1349-4275-bc45-25db418c7488\") " pod="openshift-machine-config-operator/machine-config-daemon-mstbk" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.987854 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-host-run-k8s-cni-cncf-io\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.987883 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: 
\"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.987912 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-run-openvswitch\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.987936 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-cni-netd\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.987963 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-ovnkube-script-lib\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.987990 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.988013 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-os-release\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.988035 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/15670f4b-cd59-4040-b6d5-87a7cf51d63f-cni-binary-copy\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.988058 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-etc-kubernetes\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.988110 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.988139 4721 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-run-netns\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.988168 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.988194 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-host-run-netns\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.988220 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-etc-openvswitch\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979527 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.979797 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980652 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980677 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980699 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980804 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980850 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980968 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.980982 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981250 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981300 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981334 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981344 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981713 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981722 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981752 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.991791 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981779 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981801 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982010 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982020 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.981921 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982162 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982357 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982367 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982822 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982831 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.982986 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983194 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983327 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.983363 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.984716 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.984849 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985252 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985163 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:34 crc kubenswrapper[4721]: I1121 13:36:34.985657 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.986179 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.987260 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.987401 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.988563 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.988759 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.988866 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.989045 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.989283 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.989684 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.990701 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.991356 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.992205 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.992315 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.992440 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:34.992783 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:36:35.492748931 +0000 UTC m=+33.470640199 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.992778 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.992870 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.992864 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.991788 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.992948 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.993104 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.993146 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.993194 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.993415 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.993446 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.993477 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.993661 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.994305 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.994343 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.994599 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.994694 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.994770 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.994948 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.995117 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.995135 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.995876 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.995903 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.996136 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.996147 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.996196 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.996248 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.996309 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.996534 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.996550 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.997500 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.997587 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.998359 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.998523 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.998697 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.998741 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.998878 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.998909 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.998915 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.998909 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.978960 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.999194 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.999443 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:34.999916 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.000307 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.000671 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.000965 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.001008 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.001288 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.002939 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.003264 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.003350 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.003388 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.003511 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.004118 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.004653 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.004681 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.005666 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.005974 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.006271 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.006322 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.006398 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.006602 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.007146 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.007441 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.007762 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.008051 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.008381 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.008584 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.009595 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). 
InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.010124 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.012441 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.012702 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.012794 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:35.512765477 +0000 UTC m=+33.490656955 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.012841 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.012932 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:35.512908431 +0000 UTC m=+33.490799889 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.013953 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.014017 4721 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.020765 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.022701 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.031773 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.031934 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.031944 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.031952 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.032111 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.032655 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.032751 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.032782 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.032801 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.032889 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:35.532861905 +0000 UTC m=+33.510753373 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.033057 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.033090 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.033102 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.033154 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:35.533137092 +0000 UTC m=+33.511028350 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.033463 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.033468 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.033904 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.034040 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.034200 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.034190 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.034687 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.034734 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.034791 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.034700 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.035866 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.036315 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.037292 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.036688 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.036420 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.036606 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.036796 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.036915 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.037026 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.037036 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.037756 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.038115 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.038565 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.039651 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.040158 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.040897 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.045454 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The 
container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.045814 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.046783 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.047405 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.047567 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.047677 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.047681 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.047761 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.048293 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.048444 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.048536 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.048538 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.049006 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.049040 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.049050 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.049186 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.049149 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.049724 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.050276 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.050314 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.050351 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.050645 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.051465 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.054489 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). 
InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.054571 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.055123 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.055355 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.055606 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.055869 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.056136 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.056238 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.056400 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.056677 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.056763 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.056993 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.057266 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.057365 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.060087 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.060207 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.060365 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.060882 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.060922 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.061480 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.061628 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.061735 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.061846 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.061958 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.061991 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.062014 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.062048 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.062007 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.061340 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.062113 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.062540 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.062696 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.062713 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.064112 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.067434 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.073502 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\"
,\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.074396 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.074724 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.075202 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.077887 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.080854 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.081944 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.083535 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.084857 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 
21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.085193 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.087640 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089162 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-host-run-netns\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089202 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-etc-openvswitch\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089214 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089225 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-host-var-lib-cni-multus\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089264 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/fe3943c5-1349-4275-bc45-25db418c7488-rootfs\") pod \"machine-config-daemon-mstbk\" (UID: \"fe3943c5-1349-4275-bc45-25db418c7488\") " pod="openshift-machine-config-operator/machine-config-daemon-mstbk" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089286 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-host-run-netns\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089290 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-system-cni-dir\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089349 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/553d849b-ea1f-436e-aa8f-190203f6ab6e-os-release\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089373 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" 
(UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-log-socket\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089396 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/553d849b-ea1f-436e-aa8f-190203f6ab6e-cnibin\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089415 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-hostroot\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089435 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-host-run-multus-certs\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089457 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-run-ovn-kubernetes\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089481 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-ovn-node-metrics-cert\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089503 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-multus-cni-dir\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089526 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-run-systemd\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089550 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089581 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fe3943c5-1349-4275-bc45-25db418c7488-proxy-tls\") pod 
\"machine-config-daemon-mstbk\" (UID: \"fe3943c5-1349-4275-bc45-25db418c7488\") " pod="openshift-machine-config-operator/machine-config-daemon-mstbk" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089598 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-system-cni-dir\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089606 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwv9f\" (UniqueName: \"kubernetes.io/projected/15670f4b-cd59-4040-b6d5-87a7cf51d63f-kube-api-access-pwv9f\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089639 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzjfx\" (UniqueName: \"kubernetes.io/projected/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-kube-api-access-xzjfx\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089665 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cc54c\" (UniqueName: \"kubernetes.io/projected/4dd2533b-aa3f-44c0-aff1-3a13c844ad5a-kube-api-access-cc54c\") pod \"node-resolver-tv49z\" (UID: \"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\") " pod="openshift-dns/node-resolver-tv49z" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089691 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/553d849b-ea1f-436e-aa8f-190203f6ab6e-cni-binary-copy\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089715 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-slash\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089757 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-cni-bin\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089783 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-env-overrides\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089825 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-cnibin\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " 
pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089852 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/553d849b-ea1f-436e-aa8f-190203f6ab6e-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089880 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-systemd-units\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089893 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/553d849b-ea1f-436e-aa8f-190203f6ab6e-os-release\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089906 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089940 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-log-socket\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089951 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089978 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/553d849b-ea1f-436e-aa8f-190203f6ab6e-cnibin\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.089978 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/553d849b-ea1f-436e-aa8f-190203f6ab6e-tuning-conf-dir\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090018 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-var-lib-openvswitch\") pod \"ovnkube-node-x6mh9\" (UID: 
\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090043 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fe3943c5-1349-4275-bc45-25db418c7488-mcd-auth-proxy-config\") pod \"machine-config-daemon-mstbk\" (UID: \"fe3943c5-1349-4275-bc45-25db418c7488\") " pod="openshift-machine-config-operator/machine-config-daemon-mstbk" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090083 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-host-var-lib-kubelet\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090109 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-kubelet\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090132 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-multus-conf-dir\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090158 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bhjb\" (UniqueName: \"kubernetes.io/projected/553d849b-ea1f-436e-aa8f-190203f6ab6e-kube-api-access-4bhjb\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090186 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-host-var-lib-cni-bin\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090219 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-ovnkube-config\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090239 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/15670f4b-cd59-4040-b6d5-87a7cf51d63f-multus-daemon-config\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090258 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/553d849b-ea1f-436e-aa8f-190203f6ab6e-system-cni-dir\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " 
pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090277 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4dd2533b-aa3f-44c0-aff1-3a13c844ad5a-hosts-file\") pod \"node-resolver-tv49z\" (UID: \"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\") " pod="openshift-dns/node-resolver-tv49z" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090314 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-multus-socket-dir-parent\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090335 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-run-ovn\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090352 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-node-log\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090382 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sbp6\" (UniqueName: \"kubernetes.io/projected/fe3943c5-1349-4275-bc45-25db418c7488-kube-api-access-9sbp6\") pod \"machine-config-daemon-mstbk\" (UID: \"fe3943c5-1349-4275-bc45-25db418c7488\") " pod="openshift-machine-config-operator/machine-config-daemon-mstbk" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090400 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-host-run-k8s-cni-cncf-io\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090425 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-run-openvswitch\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090443 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-cni-netd\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090462 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-ovnkube-script-lib\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc 
kubenswrapper[4721]: I1121 13:36:35.090493 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-os-release\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090511 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/15670f4b-cd59-4040-b6d5-87a7cf51d63f-cni-binary-copy\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090541 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-etc-kubernetes\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090573 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-run-netns\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090641 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090663 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090678 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090693 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090707 4721 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090711 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-etc-openvswitch\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090723 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090677 4721 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/553d849b-ea1f-436e-aa8f-190203f6ab6e-tuning-conf-dir\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090935 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.090975 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-host-var-lib-cni-multus\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.091024 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.091036 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/fe3943c5-1349-4275-bc45-25db418c7488-rootfs\") pod \"machine-config-daemon-mstbk\" (UID: \"fe3943c5-1349-4275-bc45-25db418c7488\") " pod="openshift-machine-config-operator/machine-config-daemon-mstbk" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.091051 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-hostroot\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.091081 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-multus-conf-dir\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.091106 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.091297 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-host-run-multus-certs\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.091332 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-run-ovn-kubernetes\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.091329 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-cni-netd\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.091606 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-host-run-k8s-cni-cncf-io\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.091636 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-host-var-lib-cni-bin\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.091658 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-run-openvswitch\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.091799 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4dd2533b-aa3f-44c0-aff1-3a13c844ad5a-hosts-file\") pod \"node-resolver-tv49z\" (UID: \"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\") " pod="openshift-dns/node-resolver-tv49z" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.092022 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-os-release\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.092144 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-multus-socket-dir-parent\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.092636 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-node-log\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.092692 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-run-ovn\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.092718 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-etc-kubernetes\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.092712 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/553d849b-ea1f-436e-aa8f-190203f6ab6e-system-cni-dir\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.093400 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-run-systemd\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.094033 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-ovnkube-script-lib\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.094682 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/15670f4b-cd59-4040-b6d5-87a7cf51d63f-cni-binary-copy\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.094719 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-run-netns\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.094740 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-systemd-units\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.094784 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-cni-bin\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.094819 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.094856 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-var-lib-openvswitch\") pod \"ovnkube-node-x6mh9\" (UID: 
\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.094885 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-kubelet\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.094951 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-cnibin\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095025 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-multus-cni-dir\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095078 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-slash\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095125 4721 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095150 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095166 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095180 4721 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095194 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095206 4721 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095221 4721 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095235 4721 reconciler_common.go:293] "Volume detached for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095248 4721 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095262 4721 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095275 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095289 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095303 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095317 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095331 4721 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095344 4721 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095359 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095622 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.094891 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/553d849b-ea1f-436e-aa8f-190203f6ab6e-cni-binary-copy\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.095867 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/15670f4b-cd59-4040-b6d5-87a7cf51d63f-multus-daemon-config\") pod 
\"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.096887 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-ovnkube-config\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097087 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-env-overrides\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097148 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097325 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fe3943c5-1349-4275-bc45-25db418c7488-mcd-auth-proxy-config\") pod \"machine-config-daemon-mstbk\" (UID: \"fe3943c5-1349-4275-bc45-25db418c7488\") " pod="openshift-machine-config-operator/machine-config-daemon-mstbk" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097371 4721 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097396 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097411 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097427 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097442 4721 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097458 4721 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097471 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097484 4721 reconciler_common.go:293] "Volume detached 
for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097498 4721 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097509 4721 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097522 4721 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097535 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097548 4721 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097561 4721 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097574 4721 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097632 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/15670f4b-cd59-4040-b6d5-87a7cf51d63f-host-var-lib-kubelet\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097710 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097733 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097750 4721 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097809 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath 
\"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097827 4721 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097870 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097839 4721 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097922 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.097969 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.098007 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.098076 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.098093 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.098106 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.098141 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.098158 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.098173 4721 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.098186 4721 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.098220 4721 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.098235 4721 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.098249 4721 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.098262 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.098313 4721 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.098327 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.098338 4721 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.098353 4721 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.098366 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.098615 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.099115 4721 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.099184 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.099203 4721 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.099243 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.099261 4721 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.099699 4721 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.099724 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.100241 4721 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.100281 4721 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.100296 4721 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.100314 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102005 4721 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102037 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102054 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102174 4721 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102196 4721 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102210 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102225 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102261 4721 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102279 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102335 4721 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102352 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102366 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102384 4721 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102397 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102430 4721 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102443 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102458 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102471 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" 
(UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102508 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102522 4721 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102535 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102472 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\
\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102549 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102770 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102787 4721 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102801 4721 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 
13:36:35.102815 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102828 4721 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102840 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102852 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102864 4721 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102880 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102892 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102903 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102915 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102928 4721 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102939 4721 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102949 4721 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102961 4721 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102972 4721 
reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102984 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.102995 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103006 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103017 4721 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103028 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103039 4721 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103050 4721 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103078 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103090 4721 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103101 4721 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103112 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103123 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103138 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: 
\"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103149 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103162 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103173 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103184 4721 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103195 4721 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103209 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103222 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103237 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103248 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103482 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fe3943c5-1349-4275-bc45-25db418c7488-proxy-tls\") pod \"machine-config-daemon-mstbk\" (UID: \"fe3943c5-1349-4275-bc45-25db418c7488\") " pod="openshift-machine-config-operator/machine-config-daemon-mstbk" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.099859 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103767 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103860 4721 
reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103878 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103892 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103905 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103917 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103928 4721 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103940 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103957 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103970 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103980 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.103992 4721 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104003 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104014 4721 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104027 4721 reconciler_common.go:293] "Volume 
detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104039 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104050 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104125 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104141 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104152 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104163 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104173 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104184 4721 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104194 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104204 4721 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104219 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104230 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104241 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104253 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104263 4721 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104274 4721 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104284 4721 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104295 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104306 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104316 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104326 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104339 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104349 4721 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104360 4721 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104371 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104387 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath 
\"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104400 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104411 4721 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104422 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104435 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104446 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104459 4721 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104473 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104487 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104499 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104509 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104520 4721 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104531 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104543 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: 
\"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.104555 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.105218 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/553d849b-ea1f-436e-aa8f-190203f6ab6e-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.106040 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-ovn-node-metrics-cert\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.108090 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzjfx\" (UniqueName: \"kubernetes.io/projected/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-kube-api-access-xzjfx\") pod \"ovnkube-node-x6mh9\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.109822 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bhjb\" (UniqueName: \"kubernetes.io/projected/553d849b-ea1f-436e-aa8f-190203f6ab6e-kube-api-access-4bhjb\") pod \"multus-additional-cni-plugins-x4hgm\" (UID: \"553d849b-ea1f-436e-aa8f-190203f6ab6e\") " pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.111116 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.111614 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sbp6\" (UniqueName: \"kubernetes.io/projected/fe3943c5-1349-4275-bc45-25db418c7488-kube-api-access-9sbp6\") pod \"machine-config-daemon-mstbk\" (UID: \"fe3943c5-1349-4275-bc45-25db418c7488\") " pod="openshift-machine-config-operator/machine-config-daemon-mstbk" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.112828 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwv9f\" (UniqueName: \"kubernetes.io/projected/15670f4b-cd59-4040-b6d5-87a7cf51d63f-kube-api-access-pwv9f\") pod \"multus-hp6ft\" (UID: \"15670f4b-cd59-4040-b6d5-87a7cf51d63f\") " pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.113863 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.114283 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.114422 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cc54c\" (UniqueName: \"kubernetes.io/projected/4dd2533b-aa3f-44c0-aff1-3a13c844ad5a-kube-api-access-cc54c\") pod \"node-resolver-tv49z\" (UID: \"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\") " pod="openshift-dns/node-resolver-tv49z" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.117136 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.123511 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.132189 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.140771 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.147662 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.200938 4721 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.206133 4721 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.206164 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.206173 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.229707 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.239592 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.246671 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 21 13:36:35 crc kubenswrapper[4721]: W1121 13:36:35.258025 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-a8f3735ca00f5c244bb0ca1a113b334459acb3ce2f794e6c2cf36515f9b848f0 WatchSource:0}: Error finding container a8f3735ca00f5c244bb0ca1a113b334459acb3ce2f794e6c2cf36515f9b848f0: Status 404 returned error can't find the container with id a8f3735ca00f5c244bb0ca1a113b334459acb3ce2f794e6c2cf36515f9b848f0 Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.261236 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.286801 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-tv49z" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.297500 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-hp6ft" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.316837 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.331840 4721 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.333877 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.333943 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.333957 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.334048 4721 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.344426 4721 kubelet_node_status.go:115] "Node was previously registered" node="crc" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.344517 4721 kubelet_node_status.go:79] "Successfully registered node" node="crc" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.346196 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.346229 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.346241 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.346259 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.346274 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:35Z","lastTransitionTime":"2025-11-21T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.349841 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.365888 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.372170 4721 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.372204 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.372214 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.372231 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.372242 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:35Z","lastTransitionTime":"2025-11-21T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.381535 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.392790 4721 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.392848 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.392896 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.392922 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.392937 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:35Z","lastTransitionTime":"2025-11-21T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:35 crc kubenswrapper[4721]: W1121 13:36:35.395589 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod553d849b_ea1f_436e_aa8f_190203f6ab6e.slice/crio-1e3cc392e8c703a268219e6d96cc8a75e5836e4cb52601c93191a56eb7ce473b WatchSource:0}: Error finding container 1e3cc392e8c703a268219e6d96cc8a75e5836e4cb52601c93191a56eb7ce473b: Status 404 returned error can't find the container with id 1e3cc392e8c703a268219e6d96cc8a75e5836e4cb52601c93191a56eb7ce473b Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.404746 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.408902 4721 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.408956 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.408968 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.408991 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.409005 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:35Z","lastTransitionTime":"2025-11-21T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:35 crc kubenswrapper[4721]: W1121 13:36:35.418476 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7db08ddd_c14b_40b0_a2fd_67f84d6e747a.slice/crio-1110b51553b6e2b69ff957ee8abe0b4faf00d0a2773d94e7f70f91395844be9b WatchSource:0}: Error finding container 1110b51553b6e2b69ff957ee8abe0b4faf00d0a2773d94e7f70f91395844be9b: Status 404 returned error can't find the container with id 1110b51553b6e2b69ff957ee8abe0b4faf00d0a2773d94e7f70f91395844be9b Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.423826 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.431284 4721 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.431335 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.431346 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.431369 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.431383 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:35Z","lastTransitionTime":"2025-11-21T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.440782 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.440942 4721 
kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.443283 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.443333 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.443344 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.443365 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.443379 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:35Z","lastTransitionTime":"2025-11-21T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.509559 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.509717 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:36:36.50969115 +0000 UTC m=+34.487582408 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.548208 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.548252 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.548261 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.548280 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.548293 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:35Z","lastTransitionTime":"2025-11-21T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.610783 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.610830 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.610883 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.610908 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.611008 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.611040 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.611080 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.611093 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.611164 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:36.611108704 +0000 UTC m=+34.589000052 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.611181 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.611190 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:36.611179055 +0000 UTC m=+34.589070443 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.611197 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.611206 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.611243 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:36.611226947 +0000 UTC m=+34.589118305 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.611286 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 13:36:35 crc kubenswrapper[4721]: E1121 13:36:35.611310 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:36.611304119 +0000 UTC m=+34.589195377 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.651019 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.651114 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.651134 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.651164 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.651193 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:35Z","lastTransitionTime":"2025-11-21T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.753907 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.753954 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.753964 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.753981 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.753993 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:35Z","lastTransitionTime":"2025-11-21T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.857592 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.857681 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.857698 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.857719 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.857736 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:35Z","lastTransitionTime":"2025-11-21T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.961414 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.961483 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.961505 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.961541 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:35 crc kubenswrapper[4721]: I1121 13:36:35.961560 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:35Z","lastTransitionTime":"2025-11-21T13:36:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.064526 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.064569 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.064579 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.064596 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.064606 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:36Z","lastTransitionTime":"2025-11-21T13:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.168205 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.168283 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.168301 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.168331 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.168348 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:36Z","lastTransitionTime":"2025-11-21T13:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.197689 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"2aa25c159daf62d8f9f34c764c02fe6cc5df3024e2906a7a526059fc1b14b2db"}
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.199017 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" event={"ID":"553d849b-ea1f-436e-aa8f-190203f6ab6e","Type":"ContainerStarted","Data":"1e3cc392e8c703a268219e6d96cc8a75e5836e4cb52601c93191a56eb7ce473b"}
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.200155 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" event={"ID":"fe3943c5-1349-4275-bc45-25db418c7488","Type":"ContainerStarted","Data":"309fbf6cc6ad29116dcba33e301cd6f45ab45f7f1c09dac53b6956374979e794"}
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.201625 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"a8f3735ca00f5c244bb0ca1a113b334459acb3ce2f794e6c2cf36515f9b848f0"}
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.202919 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"317fa4eee6f9139efb015704bad46ac38479e18efa020114dcf5ddd8eb1d2118"}
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.204707 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerStarted","Data":"e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c"}
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.204749 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerStarted","Data":"1110b51553b6e2b69ff957ee8abe0b4faf00d0a2773d94e7f70f91395844be9b"}
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.206671 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hp6ft" event={"ID":"15670f4b-cd59-4040-b6d5-87a7cf51d63f","Type":"ContainerStarted","Data":"351ac47817f9a32bafefdd6ec869253d7e4c0a446b1516c8b436dd04ac0b28a0"}
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.207846 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-tv49z" event={"ID":"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a","Type":"ContainerStarted","Data":"a1d2a94f0457e2fe2c051d7a38cf41e8f597a53a4f608505a26ef284abf70ebe"}
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.272017 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.272105 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.272121 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.272148 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.272165 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:36Z","lastTransitionTime":"2025-11-21T13:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.375538 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.375580 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.375611 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.375628 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.375641 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:36Z","lastTransitionTime":"2025-11-21T13:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.480371 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.480482 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.480499 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.480570 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.480593 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:36Z","lastTransitionTime":"2025-11-21T13:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.522278 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:36:36 crc kubenswrapper[4721]: E1121 13:36:36.522489 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:36:38.522442302 +0000 UTC m=+36.500333560 (durationBeforeRetry 2s).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.583364 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.583413 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.583424 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.583442 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.583456 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:36Z","lastTransitionTime":"2025-11-21T13:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.623678 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.623767 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.623813 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.623853 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:36 crc kubenswrapper[4721]: E1121 13:36:36.623963 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 13:36:36 crc kubenswrapper[4721]: E1121 13:36:36.624010 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 13:36:36 crc kubenswrapper[4721]: E1121 13:36:36.623967 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 13:36:36 crc kubenswrapper[4721]: E1121 13:36:36.624043 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:38.62402067 +0000 UTC m=+36.601911928 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 13:36:36 crc kubenswrapper[4721]: E1121 13:36:36.624201 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 13:36:36 crc kubenswrapper[4721]: E1121 13:36:36.624084 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 13:36:36 crc kubenswrapper[4721]: E1121 13:36:36.624237 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:38.624211215 +0000 UTC m=+36.602102493 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 13:36:36 crc kubenswrapper[4721]: E1121 13:36:36.624246 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:36 crc kubenswrapper[4721]: E1121 13:36:36.624254 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 13:36:36 crc kubenswrapper[4721]: E1121 13:36:36.624278 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:36 crc kubenswrapper[4721]: E1121 13:36:36.624330 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:38.624314377 +0000 UTC m=+36.602205635 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:36 crc kubenswrapper[4721]: E1121 13:36:36.624355 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:38.624343968 +0000 UTC m=+36.602235406 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.686761 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.686835 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.686849 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.686872 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.686890 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:36Z","lastTransitionTime":"2025-11-21T13:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.789389 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.789441 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.789451 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.789472 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.789485 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:36Z","lastTransitionTime":"2025-11-21T13:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.892802 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.892841 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.892850 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.892868 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.892879 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:36Z","lastTransitionTime":"2025-11-21T13:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.981009 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:36:36 crc kubenswrapper[4721]: E1121 13:36:36.981189 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.981570 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:36:36 crc kubenswrapper[4721]: E1121 13:36:36.981640 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.981765 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:36 crc kubenswrapper[4721]: E1121 13:36:36.981833 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.990013 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.990856 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.992650 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.993328 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.994462 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.995117 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.995809 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.997791 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.998256 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.998271 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.998297 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.998048 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.998310 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:36Z","lastTransitionTime":"2025-11-21T13:36:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:36 crc kubenswrapper[4721]: I1121 13:36:36.999016 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.000150 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.000728 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.001842 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.002408 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.003412 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.003962 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.004451 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.005553 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.006026 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.007709 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.008479 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.009491 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.010016 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Nov 21 13:36:37 
crc kubenswrapper[4721]: I1121 13:36:37.011003 4721 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.011225 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.012921 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.014554 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.015919 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.016492 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.101284 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.101328 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.101344 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.101362 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.101372 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:37Z","lastTransitionTime":"2025-11-21T13:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.185470 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-7gx9d"] Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.186019 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-7gx9d" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.189622 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.189828 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.190413 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.193043 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.203527 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.203568 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.203577 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.203594 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.203605 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:37Z","lastTransitionTime":"2025-11-21T13:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.210298 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.212999 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-hp6ft" event={"ID":"15670f4b-cd59-4040-b6d5-87a7cf51d63f","Type":"ContainerStarted","Data":"1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69"} Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.214724 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" event={"ID":"fe3943c5-1349-4275-bc45-25db418c7488","Type":"ContainerStarted","Data":"3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522"} Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.214766 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" event={"ID":"fe3943c5-1349-4275-bc45-25db418c7488","Type":"ContainerStarted","Data":"ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6"} Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.216705 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968"} Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.216767 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e"} Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.218199 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" event={"ID":"553d849b-ea1f-436e-aa8f-190203f6ab6e","Type":"ContainerStarted","Data":"49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303"} Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.219681 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-tv49z" event={"ID":"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a","Type":"ContainerStarted","Data":"d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4"} Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.221276 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d"} Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.221377 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.222617 4721 generic.go:334] "Generic (PLEG): container finished" podID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerID="e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c" exitCode=0 Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.222674 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerDied","Data":"e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c"} Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.232178 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.242296 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.254151 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.262010 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: 
connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.272800 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.290024 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.301312 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.305795 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.305834 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.305845 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.305861 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.305879 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:37Z","lastTransitionTime":"2025-11-21T13:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.313096 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.324385 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.333174 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-td99q\" (UniqueName: \"kubernetes.io/projected/b703fc8f-138e-4ba9-9e08-1e9d718e13e1-kube-api-access-td99q\") pod \"node-ca-7gx9d\" (UID: \"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\") " pod="openshift-image-registry/node-ca-7gx9d" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.333308 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/b703fc8f-138e-4ba9-9e08-1e9d718e13e1-serviceca\") pod \"node-ca-7gx9d\" (UID: \"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\") " pod="openshift-image-registry/node-ca-7gx9d" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.333869 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b703fc8f-138e-4ba9-9e08-1e9d718e13e1-host\") pod \"node-ca-7gx9d\" (UID: \"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\") " pod="openshift-image-registry/node-ca-7gx9d" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.340814 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\
"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.349497 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for 
pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.360218 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.372245 4721 
status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.382893 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.398779 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.408532 
4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.408805 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.408902 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.408979 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.409036 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:37Z","lastTransitionTime":"2025-11-21T13:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.410553 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.422549 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.436087 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b703fc8f-138e-4ba9-9e08-1e9d718e13e1-host\") pod \"node-ca-7gx9d\" (UID: \"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\") " pod="openshift-image-registry/node-ca-7gx9d" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.436168 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-td99q\" (UniqueName: \"kubernetes.io/projected/b703fc8f-138e-4ba9-9e08-1e9d718e13e1-kube-api-access-td99q\") pod \"node-ca-7gx9d\" (UID: \"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\") " pod="openshift-image-registry/node-ca-7gx9d" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.436209 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/b703fc8f-138e-4ba9-9e08-1e9d718e13e1-serviceca\") pod \"node-ca-7gx9d\" (UID: \"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\") " pod="openshift-image-registry/node-ca-7gx9d" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.436739 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b703fc8f-138e-4ba9-9e08-1e9d718e13e1-host\") pod \"node-ca-7gx9d\" (UID: \"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\") " pod="openshift-image-registry/node-ca-7gx9d" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.437314 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/b703fc8f-138e-4ba9-9e08-1e9d718e13e1-serviceca\") pod \"node-ca-7gx9d\" (UID: \"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\") " pod="openshift-image-registry/node-ca-7gx9d" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.442191 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.462614 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\
\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\
\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.464986 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-td99q\" (UniqueName: \"kubernetes.io/projected/b703fc8f-138e-4ba9-9e08-1e9d718e13e1-kube-api-access-td99q\") pod \"node-ca-7gx9d\" (UID: \"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\") " pod="openshift-image-registry/node-ca-7gx9d" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.474871 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.490261 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"qu
ay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.498261 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-7gx9d" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.503721 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.511974 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.512032 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.512055 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.512124 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.512151 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:37Z","lastTransitionTime":"2025-11-21T13:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:37 crc kubenswrapper[4721]: W1121 13:36:37.515380 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb703fc8f_138e_4ba9_9e08_1e9d718e13e1.slice/crio-a77dc8c1a7e2a1ce3f56dce0a201f45abed389e798eba07f91eed39865da1f76 WatchSource:0}: Error finding container a77dc8c1a7e2a1ce3f56dce0a201f45abed389e798eba07f91eed39865da1f76: Status 404 returned error can't find the container with id a77dc8c1a7e2a1ce3f56dce0a201f45abed389e798eba07f91eed39865da1f76 Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.516738 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.532447 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-confi
g-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.615548 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.615606 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.615659 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.615689 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.615706 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:37Z","lastTransitionTime":"2025-11-21T13:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.720297 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.720342 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.720358 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.720382 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.720398 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:37Z","lastTransitionTime":"2025-11-21T13:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.823088 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.823131 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.823143 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.823160 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.823172 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:37Z","lastTransitionTime":"2025-11-21T13:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.927199 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.927263 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.927281 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.927309 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:37 crc kubenswrapper[4721]: I1121 13:36:37.927327 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:37Z","lastTransitionTime":"2025-11-21T13:36:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.030749 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.030829 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.030851 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.030883 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.030907 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:38Z","lastTransitionTime":"2025-11-21T13:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.133432 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.133502 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.133526 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.133557 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.133582 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:38Z","lastTransitionTime":"2025-11-21T13:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.228018 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-7gx9d" event={"ID":"b703fc8f-138e-4ba9-9e08-1e9d718e13e1","Type":"ContainerStarted","Data":"a77dc8c1a7e2a1ce3f56dce0a201f45abed389e798eba07f91eed39865da1f76"} Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.231038 4721 generic.go:334] "Generic (PLEG): container finished" podID="553d849b-ea1f-436e-aa8f-190203f6ab6e" containerID="49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303" exitCode=0 Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.231210 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" event={"ID":"553d849b-ea1f-436e-aa8f-190203f6ab6e","Type":"ContainerDied","Data":"49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303"} Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.236509 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerStarted","Data":"f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b"} Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.236639 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerStarted","Data":"0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9"} Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.239265 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.239305 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.239335 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.239583 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.239615 4721 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:38Z","lastTransitionTime":"2025-11-21T13:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.251620 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.282620 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.295550 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.309317 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.329358 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\
\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.342748 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.343641 4721 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.343718 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.343742 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.343778 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.343803 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:38Z","lastTransitionTime":"2025-11-21T13:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.353759 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.366018 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.377487 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.389992 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.400667 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-confi
g-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.414438 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/e
nv\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.424167 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.447275 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.447320 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:38 crc 
kubenswrapper[4721]: I1121 13:36:38.447331 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.447350 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.447361 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:38Z","lastTransitionTime":"2025-11-21T13:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.548792 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:36:38 crc kubenswrapper[4721]: E1121 13:36:38.549161 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:36:42.549117476 +0000 UTC m=+40.527008744 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.552265 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.552325 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.552339 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.552362 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.552375 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:38Z","lastTransitionTime":"2025-11-21T13:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.651297 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.651364 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.651396 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.651432 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:36:38 crc kubenswrapper[4721]: E1121 13:36:38.651498 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 13:36:38 crc kubenswrapper[4721]: E1121 13:36:38.651594 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 13:36:38 crc kubenswrapper[4721]: E1121 13:36:38.651613 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:42.651584408 +0000 UTC m=+40.629475766 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 13:36:38 crc kubenswrapper[4721]: E1121 13:36:38.651617 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 13:36:38 crc kubenswrapper[4721]: E1121 13:36:38.651637 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:38 crc kubenswrapper[4721]: E1121 13:36:38.651676 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 13:36:38 crc kubenswrapper[4721]: E1121 13:36:38.651699 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:42.65167913 +0000 UTC m=+40.629570488 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:38 crc kubenswrapper[4721]: E1121 13:36:38.651876 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:42.651855275 +0000 UTC m=+40.629746533 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 13:36:38 crc kubenswrapper[4721]: E1121 13:36:38.651707 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 13:36:38 crc kubenswrapper[4721]: E1121 13:36:38.651908 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 13:36:38 crc kubenswrapper[4721]: E1121 13:36:38.651923 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:38 crc kubenswrapper[4721]: E1121 13:36:38.651960 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:42.651953088 +0000 UTC m=+40.629844346 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.655492 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.655537 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.655551 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.655570 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.655585 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:38Z","lastTransitionTime":"2025-11-21T13:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.757629 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.757676 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.757689 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.757726 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.757742 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:38Z","lastTransitionTime":"2025-11-21T13:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.861210 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.861289 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.861315 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.861347 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.861374 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:38Z","lastTransitionTime":"2025-11-21T13:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.963972 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.964012 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.964020 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.964037 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.964047 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:38Z","lastTransitionTime":"2025-11-21T13:36:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.980913 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.980984 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:36:38 crc kubenswrapper[4721]: I1121 13:36:38.981116 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:38 crc kubenswrapper[4721]: E1121 13:36:38.981288 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:36:38 crc kubenswrapper[4721]: E1121 13:36:38.981899 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:36:38 crc kubenswrapper[4721]: E1121 13:36:38.982013 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.067522 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.067579 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.067590 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.067609 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.067620 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:39Z","lastTransitionTime":"2025-11-21T13:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.171154 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.171220 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.171240 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.171265 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.171284 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:39Z","lastTransitionTime":"2025-11-21T13:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.251651 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35"} Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.254660 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" event={"ID":"553d849b-ea1f-436e-aa8f-190203f6ab6e","Type":"ContainerStarted","Data":"36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa"} Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.261138 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerStarted","Data":"9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8"} Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.261191 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerStarted","Data":"63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa"} Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.261237 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerStarted","Data":"7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0"} Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.264585 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-7gx9d" event={"ID":"b703fc8f-138e-4ba9-9e08-1e9d718e13e1","Type":"ContainerStarted","Data":"40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e"} Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.274499 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.275270 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.275358 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.275377 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.275424 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.275444 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:39Z","lastTransitionTime":"2025-11-21T13:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.303083 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be2
43bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.321791 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.344057 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.366162 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.378935 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.378996 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.379013 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.379035 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.379080 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:39Z","lastTransitionTime":"2025-11-21T13:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.399508 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets
/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.417781 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.435171 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@s
ha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.452119 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.467094 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z"
Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.483702 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.483768 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.483716 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.483791 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.483897 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.483953 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:39Z","lastTransitionTime":"2025-11-21T13:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.500171 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.514165 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.533594 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.551679 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.568130 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.587299 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.587355 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.587374 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.587399 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.587414 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:39Z","lastTransitionTime":"2025-11-21T13:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.592978 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be2
43bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.612493 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.632090 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.648715 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.668790 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.685588 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.689800 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.689837 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.689845 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.689861 4721 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.689871 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:39Z","lastTransitionTime":"2025-11-21T13:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.701826 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d
17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.721545 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.741712 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.758581 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:39Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.793040 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.793109 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.793122 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.793142 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.793157 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:39Z","lastTransitionTime":"2025-11-21T13:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.896624 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.896699 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.896714 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.896742 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.896759 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:39Z","lastTransitionTime":"2025-11-21T13:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.999729 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.999814 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:39 crc kubenswrapper[4721]: I1121 13:36:39.999866 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:39.999897 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:39.999922 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:39Z","lastTransitionTime":"2025-11-21T13:36:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.103200 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.103295 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.103315 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.103340 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.103357 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:40Z","lastTransitionTime":"2025-11-21T13:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.206190 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.206238 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.206250 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.206276 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.206289 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:40Z","lastTransitionTime":"2025-11-21T13:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.275657 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerStarted","Data":"3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb"} Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.309077 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.309122 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.309133 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.309150 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.309161 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:40Z","lastTransitionTime":"2025-11-21T13:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.412550 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.412613 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.412625 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.412661 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.412674 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:40Z","lastTransitionTime":"2025-11-21T13:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.516039 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.516147 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.516163 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.516188 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.516206 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:40Z","lastTransitionTime":"2025-11-21T13:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.619813 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.619927 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.619955 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.619991 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.620015 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:40Z","lastTransitionTime":"2025-11-21T13:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.722664 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.723187 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.723204 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.723230 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.723245 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:40Z","lastTransitionTime":"2025-11-21T13:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.826450 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.826497 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.826509 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.826526 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.826541 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:40Z","lastTransitionTime":"2025-11-21T13:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.929643 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.929708 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.929725 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.929749 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.929763 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:40Z","lastTransitionTime":"2025-11-21T13:36:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.981544 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.981634 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:36:40 crc kubenswrapper[4721]: E1121 13:36:40.981702 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:36:40 crc kubenswrapper[4721]: I1121 13:36:40.981730 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:40 crc kubenswrapper[4721]: E1121 13:36:40.981887 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:36:40 crc kubenswrapper[4721]: E1121 13:36:40.981981 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.031897 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.031966 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.031984 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.032011 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.032029 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:41Z","lastTransitionTime":"2025-11-21T13:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.135200 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.135261 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.135274 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.135297 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.135310 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:41Z","lastTransitionTime":"2025-11-21T13:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.238812 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.238869 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.238884 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.238903 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.238917 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:41Z","lastTransitionTime":"2025-11-21T13:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.281753 4721 generic.go:334] "Generic (PLEG): container finished" podID="553d849b-ea1f-436e-aa8f-190203f6ab6e" containerID="36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa" exitCode=0 Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.281861 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" event={"ID":"553d849b-ea1f-436e-aa8f-190203f6ab6e","Type":"ContainerDied","Data":"36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa"} Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.303873 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:41Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.334632 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:41Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.341985 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.342119 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.342187 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.342260 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.342337 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:41Z","lastTransitionTime":"2025-11-21T13:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.351164 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:41Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.367790 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:41Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.390723 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin
\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:41Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.411049 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container 
could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:41Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.437354 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:41Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.445719 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.445813 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.445841 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.445875 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.445898 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:41Z","lastTransitionTime":"2025-11-21T13:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.459203 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:41Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.479197 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:41Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.505883 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:41Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.526180 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:41Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.547974 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:41Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.548304 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.548375 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.548391 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.548422 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.548441 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:41Z","lastTransitionTime":"2025-11-21T13:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.574218 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be2
43bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:41Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.652222 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.652540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.652555 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.652574 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.652587 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:41Z","lastTransitionTime":"2025-11-21T13:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.756732 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.756780 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.756793 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.756815 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.756829 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:41Z","lastTransitionTime":"2025-11-21T13:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.859019 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.859381 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.859516 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.859552 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.859569 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:41Z","lastTransitionTime":"2025-11-21T13:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.963305 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.963671 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.963830 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.963977 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.964143 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:41Z","lastTransitionTime":"2025-11-21T13:36:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:41 crc kubenswrapper[4721]: I1121 13:36:41.999782 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.000301 4721 scope.go:117] "RemoveContainer" containerID="ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.067551 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.067820 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.067966 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.068185 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.068335 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:42Z","lastTransitionTime":"2025-11-21T13:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.171043 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.171108 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.171125 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.171150 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.171168 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:42Z","lastTransitionTime":"2025-11-21T13:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.274723 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.274792 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.274815 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.274850 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.274872 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:42Z","lastTransitionTime":"2025-11-21T13:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.298217 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerStarted","Data":"f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880"} Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.378930 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.378972 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.378984 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.379004 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.379015 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:42Z","lastTransitionTime":"2025-11-21T13:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.481036 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.481083 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.481093 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.481107 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.481118 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:42Z","lastTransitionTime":"2025-11-21T13:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.584111 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.584157 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.584170 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.584191 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.584205 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:42Z","lastTransitionTime":"2025-11-21T13:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.594630 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:36:42 crc kubenswrapper[4721]: E1121 13:36:42.594793 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:36:50.594773743 +0000 UTC m=+48.572665001 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.687316 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.687848 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.687858 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.687875 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.687885 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:42Z","lastTransitionTime":"2025-11-21T13:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.694999 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.695041 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.695086 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.695118 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:42 crc kubenswrapper[4721]: E1121 13:36:42.695239 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 13:36:42 crc kubenswrapper[4721]: E1121 13:36:42.695295 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:50.695277363 +0000 UTC m=+48.673168621 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 13:36:42 crc kubenswrapper[4721]: E1121 13:36:42.695707 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 13:36:42 crc kubenswrapper[4721]: E1121 13:36:42.695724 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 13:36:42 crc kubenswrapper[4721]: E1121 13:36:42.695737 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:42 crc kubenswrapper[4721]: E1121 13:36:42.695738 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 13:36:42 crc kubenswrapper[4721]: E1121 13:36:42.695764 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:50.695756975 +0000 UTC m=+48.673648223 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:42 crc kubenswrapper[4721]: E1121 13:36:42.695707 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 13:36:42 crc kubenswrapper[4721]: E1121 13:36:42.695779 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:50.695773476 +0000 UTC m=+48.673664734 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 13:36:42 crc kubenswrapper[4721]: E1121 13:36:42.695782 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 13:36:42 crc kubenswrapper[4721]: E1121 13:36:42.695792 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:42 crc kubenswrapper[4721]: E1121 13:36:42.695823 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:50.695815137 +0000 UTC m=+48.673706395 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.790934 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.790972 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.790982 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.791000 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.791012 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:42Z","lastTransitionTime":"2025-11-21T13:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.895665 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.896099 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.896210 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.896306 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.896382 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:42Z","lastTransitionTime":"2025-11-21T13:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.981908 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:36:42 crc kubenswrapper[4721]: E1121 13:36:42.982105 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.982535 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.982587 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:36:42 crc kubenswrapper[4721]: E1121 13:36:42.982728 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:36:42 crc kubenswrapper[4721]: E1121 13:36:42.982820 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.999711 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.999774 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:42 crc kubenswrapper[4721]: I1121 13:36:42.999794 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:42.999823 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:42.999845 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:42Z","lastTransitionTime":"2025-11-21T13:36:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.007361 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.029164 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.051038 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.070113 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.084955 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.102382 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.102936 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.103016 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.103043 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.103118 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.103150 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:43Z","lastTransitionTime":"2025-11-21T13:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.114397 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.137644 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.172772 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd 
nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"moun
tPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\
\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-2
1T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.189205 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.203971 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.205847 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.205881 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.205890 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.205908 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.205918 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:43Z","lastTransitionTime":"2025-11-21T13:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.242900 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.272368 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.299571 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin
\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.304602 4721 generic.go:334] "Generic (PLEG): container finished" podID="553d849b-ea1f-436e-aa8f-190203f6ab6e" containerID="d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415" exitCode=0 Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.304676 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" event={"ID":"553d849b-ea1f-436e-aa8f-190203f6ab6e","Type":"ContainerDied","Data":"d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415"} Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.307794 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.307803 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.307936 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.307962 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.307989 4721 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.308011 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:43Z","lastTransitionTime":"2025-11-21T13:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.309872 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa"} Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.310194 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.325251 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.339950 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.354886 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.375580 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.389699 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 
2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.404708 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"
started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.415827 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.415851 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.415859 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.415872 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.415880 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:43Z","lastTransitionTime":"2025-11-21T13:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.417164 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.430957 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.442821 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.454776 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.467800 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{
\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.477505 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 
2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.489668 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.507408 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.517836 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.517856 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.517865 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.517878 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.517887 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:43Z","lastTransitionTime":"2025-11-21T13:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.523571 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.535675 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.547572 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.559350 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.568955 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.580159 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.588965 4721 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.605558 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.619775 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.619813 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.619824 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.619839 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.619849 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:43Z","lastTransitionTime":"2025-11-21T13:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.630958 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be2
43bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.645276 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.661910 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.676978 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.691188 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.708564 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:43Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.722596 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.722624 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.722634 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.722649 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.722660 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:43Z","lastTransitionTime":"2025-11-21T13:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.824683 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.824721 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.824733 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.824749 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.824759 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:43Z","lastTransitionTime":"2025-11-21T13:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.927693 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.927755 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.927770 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.927794 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:43 crc kubenswrapper[4721]: I1121 13:36:43.927808 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:43Z","lastTransitionTime":"2025-11-21T13:36:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.030360 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.030437 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.030449 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.030468 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.030482 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:44Z","lastTransitionTime":"2025-11-21T13:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.133056 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.133149 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.133166 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.133195 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.133213 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:44Z","lastTransitionTime":"2025-11-21T13:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.236733 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.236783 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.236800 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.236823 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.236841 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:44Z","lastTransitionTime":"2025-11-21T13:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.318608 4721 generic.go:334] "Generic (PLEG): container finished" podID="553d849b-ea1f-436e-aa8f-190203f6ab6e" containerID="93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58" exitCode=0 Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.318709 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" event={"ID":"553d849b-ea1f-436e-aa8f-190203f6ab6e","Type":"ContainerDied","Data":"93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58"} Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.341078 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.341134 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.341146 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.341166 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.341177 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:44Z","lastTransitionTime":"2025-11-21T13:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.343798 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:44Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.373355 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:44Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.392527 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:44Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.410366 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:44Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.430155 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:44Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.443561 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.443611 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.443625 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.443654 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.443669 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:44Z","lastTransitionTime":"2025-11-21T13:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.451841 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:
36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:44Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.466084 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:44Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.480695 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:44Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.493668 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:44Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.505972 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-11-21T13:36:44Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.522915 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:44Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.539292 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:44Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.549144 4721 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.549354 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.549425 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.549496 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.549560 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:44Z","lastTransitionTime":"2025-11-21T13:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.552832 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:44Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.565851 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:44Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.653614 4721 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.653670 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.653688 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.653715 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.653734 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:44Z","lastTransitionTime":"2025-11-21T13:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.756847 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.757821 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.757929 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.758031 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.758157 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:44Z","lastTransitionTime":"2025-11-21T13:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.862033 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.862120 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.862141 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.862167 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.862185 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:44Z","lastTransitionTime":"2025-11-21T13:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.965529 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.965566 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.965577 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.965596 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.965609 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:44Z","lastTransitionTime":"2025-11-21T13:36:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.981310 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.981398 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:36:44 crc kubenswrapper[4721]: E1121 13:36:44.981509 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:36:44 crc kubenswrapper[4721]: I1121 13:36:44.981341 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:36:44 crc kubenswrapper[4721]: E1121 13:36:44.981612 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:36:44 crc kubenswrapper[4721]: E1121 13:36:44.981682 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.068821 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.068875 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.068886 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.068908 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.068919 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:45Z","lastTransitionTime":"2025-11-21T13:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.173195 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.173292 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.173312 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.173371 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.173393 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:45Z","lastTransitionTime":"2025-11-21T13:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.275804 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.275852 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.275863 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.275880 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.275893 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:45Z","lastTransitionTime":"2025-11-21T13:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.330404 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" event={"ID":"553d849b-ea1f-436e-aa8f-190203f6ab6e","Type":"ContainerStarted","Data":"46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e"} Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.337425 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerStarted","Data":"790e0789516a161559b418b36c74acfb6c3ea89affa597c1fbc8e509ca299c64"} Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.338396 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.338483 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.338516 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.349753 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.373604 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.380240 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.380264 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.380275 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.380293 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.380305 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:45Z","lastTransitionTime":"2025-11-21T13:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.382111 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.388416 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.413765 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\"
,\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.431003 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.446385 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursi
veReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.466557 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/se
rviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.482958 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.483004 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.483016 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.483035 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.483047 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:45Z","lastTransitionTime":"2025-11-21T13:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.488979 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.510956 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.545789 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.565122 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.582868 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.586004 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.586045 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.586083 4721 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.586108 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.586126 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:45Z","lastTransitionTime":"2025-11-21T13:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.597660 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.612828 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.657708 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node 
kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni
/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.677389 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syn
cer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.689842 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.689903 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.689921 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.689946 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.689963 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:45Z","lastTransitionTime":"2025-11-21T13:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.697306 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.717508 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.735452 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.750690 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.761597 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.761637 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.761655 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.761681 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.761700 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:45Z","lastTransitionTime":"2025-11-21T13:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.771546 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: E1121 13:36:45.782939 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.788754 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.788802 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.788818 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.788839 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.788852 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:45Z","lastTransitionTime":"2025-11-21T13:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.791311 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: E1121 13:36:45.808451 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.812649 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.813006 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.813052 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.813108 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:45 crc kubenswrapper[4721]: 
I1121 13:36:45.813131 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.813147 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:45Z","lastTransitionTime":"2025-11-21T13:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:45 crc kubenswrapper[4721]: E1121 13:36:45.836424 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.840441 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://790e0789516a161559b418b36c74acfb6c3ea89affa597c1fbc8e509ca299c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.842514 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.842630 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.842714 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.842845 4721 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.842934 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:45Z","lastTransitionTime":"2025-11-21T13:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.858318 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: E1121 13:36:45.860369 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.865144 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.865199 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.865216 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.865240 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.865256 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:45Z","lastTransitionTime":"2025-11-21T13:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.875335 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: E1121 13:36:45.881864 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: E1121 13:36:45.882254 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.884296 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.884344 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.884357 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.884381 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.884395 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:45Z","lastTransitionTime":"2025-11-21T13:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.895915 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.915769 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.927804 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursi
veReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:45Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.987791 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.987904 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.987962 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.988043 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:45 crc kubenswrapper[4721]: I1121 13:36:45.988158 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:45Z","lastTransitionTime":"2025-11-21T13:36:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.092268 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.092347 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.092361 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.092387 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.092399 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:46Z","lastTransitionTime":"2025-11-21T13:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.196374 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.196433 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.196443 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.196469 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.196490 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:46Z","lastTransitionTime":"2025-11-21T13:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.300670 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.300734 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.300755 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.300786 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.300813 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:46Z","lastTransitionTime":"2025-11-21T13:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.403980 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.404036 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.404093 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.404126 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.404149 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:46Z","lastTransitionTime":"2025-11-21T13:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.508424 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.508509 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.508533 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.508560 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.508581 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:46Z","lastTransitionTime":"2025-11-21T13:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.612006 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.612046 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.612057 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.612105 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.612115 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:46Z","lastTransitionTime":"2025-11-21T13:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.684543 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85"] Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.685048 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.687093 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.687433 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.699850 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:46Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.714935 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.714996 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.715015 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.715100 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 
13:36:46.715118 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:46Z","lastTransitionTime":"2025-11-21T13:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.721429 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ov
nkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://790e0789516a161559b418b36c74acfb6c3ea89affa597c1fbc8e509ca299c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\
\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:46Z is after 
2025-08-24T17:21:41Z" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.742452 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2fc06731-28f7-44c2-8132-c4e624f894fb-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-5nx85\" (UID: \"2fc06731-28f7-44c2-8132-c4e624f894fb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.742538 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2fc06731-28f7-44c2-8132-c4e624f894fb-env-overrides\") pod \"ovnkube-control-plane-749d76644c-5nx85\" (UID: \"2fc06731-28f7-44c2-8132-c4e624f894fb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.742561 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sh6t\" (UniqueName: \"kubernetes.io/projected/2fc06731-28f7-44c2-8132-c4e624f894fb-kube-api-access-4sh6t\") pod \"ovnkube-control-plane-749d76644c-5nx85\" (UID: \"2fc06731-28f7-44c2-8132-c4e624f894fb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.742586 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2fc06731-28f7-44c2-8132-c4e624f894fb-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-5nx85\" (UID: \"2fc06731-28f7-44c2-8132-c4e624f894fb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.748523 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:46Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.771237 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursi
veReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:46Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.790900 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:46Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.803141 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:46Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.813626 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:46Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.816838 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.816878 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.816889 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.816905 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.816915 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:46Z","lastTransitionTime":"2025-11-21T13:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.822473 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:46Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.831630 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:46Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.841101 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:46Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.843644 4721 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2fc06731-28f7-44c2-8132-c4e624f894fb-env-overrides\") pod \"ovnkube-control-plane-749d76644c-5nx85\" (UID: \"2fc06731-28f7-44c2-8132-c4e624f894fb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.843677 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2fc06731-28f7-44c2-8132-c4e624f894fb-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-5nx85\" (UID: \"2fc06731-28f7-44c2-8132-c4e624f894fb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.843694 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sh6t\" (UniqueName: \"kubernetes.io/projected/2fc06731-28f7-44c2-8132-c4e624f894fb-kube-api-access-4sh6t\") pod \"ovnkube-control-plane-749d76644c-5nx85\" (UID: \"2fc06731-28f7-44c2-8132-c4e624f894fb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.843727 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2fc06731-28f7-44c2-8132-c4e624f894fb-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-5nx85\" (UID: \"2fc06731-28f7-44c2-8132-c4e624f894fb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.844336 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2fc06731-28f7-44c2-8132-c4e624f894fb-env-overrides\") pod \"ovnkube-control-plane-749d76644c-5nx85\" (UID: \"2fc06731-28f7-44c2-8132-c4e624f894fb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.844638 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2fc06731-28f7-44c2-8132-c4e624f894fb-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-5nx85\" (UID: \"2fc06731-28f7-44c2-8132-c4e624f894fb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.854144 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2fc06731-28f7-44c2-8132-c4e624f894fb-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-5nx85\" (UID: \"2fc06731-28f7-44c2-8132-c4e624f894fb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.858955 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:46Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.861473 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sh6t\" (UniqueName: 
\"kubernetes.io/projected/2fc06731-28f7-44c2-8132-c4e624f894fb-kube-api-access-4sh6t\") pod \"ovnkube-control-plane-749d76644c-5nx85\" (UID: \"2fc06731-28f7-44c2-8132-c4e624f894fb\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.873137 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/open
shift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:46Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.886995 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:46Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.897887 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:46Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.909326 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:46Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.919661 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.919733 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.919756 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.919787 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.919810 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:46Z","lastTransitionTime":"2025-11-21T13:36:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.981086 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.981201 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:46 crc kubenswrapper[4721]: E1121 13:36:46.981321 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.981640 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:36:46 crc kubenswrapper[4721]: E1121 13:36:46.981687 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:36:46 crc kubenswrapper[4721]: E1121 13:36:46.981736 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:36:46 crc kubenswrapper[4721]: I1121 13:36:46.999518 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.022964 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.023020 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.023041 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.023113 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.023133 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:47Z","lastTransitionTime":"2025-11-21T13:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:47 crc kubenswrapper[4721]: W1121 13:36:47.034987 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2fc06731_28f7_44c2_8132_c4e624f894fb.slice/crio-2aa41c5fb23a7b6cfa3e31f0ec3fa5c610472a9a3ed9f77f2f527df8dfd3d379 WatchSource:0}: Error finding container 2aa41c5fb23a7b6cfa3e31f0ec3fa5c610472a9a3ed9f77f2f527df8dfd3d379: Status 404 returned error can't find the container with id 2aa41c5fb23a7b6cfa3e31f0ec3fa5c610472a9a3ed9f77f2f527df8dfd3d379 Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.126560 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.126620 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.126631 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.126651 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.126664 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:47Z","lastTransitionTime":"2025-11-21T13:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.229537 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.229692 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.229737 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.229754 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.229768 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:47Z","lastTransitionTime":"2025-11-21T13:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.332988 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.333052 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.333102 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.333128 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.333146 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:47Z","lastTransitionTime":"2025-11-21T13:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.348912 4721 generic.go:334] "Generic (PLEG): container finished" podID="553d849b-ea1f-436e-aa8f-190203f6ab6e" containerID="46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e" exitCode=0 Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.349029 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" event={"ID":"553d849b-ea1f-436e-aa8f-190203f6ab6e","Type":"ContainerDied","Data":"46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e"} Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.354565 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" event={"ID":"2fc06731-28f7-44c2-8132-c4e624f894fb","Type":"ContainerStarted","Data":"140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de"} Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.354645 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" event={"ID":"2fc06731-28f7-44c2-8132-c4e624f894fb","Type":"ContainerStarted","Data":"2aa41c5fb23a7b6cfa3e31f0ec3fa5c610472a9a3ed9f77f2f527df8dfd3d379"} Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.379563 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastStat
e\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.398003 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.415002 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.435497 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.435541 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.435552 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.435570 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.435583 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:47Z","lastTransitionTime":"2025-11-21T13:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns 
error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.446489 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773
257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://790e0789516a161559b418b36c74acfb6c3ea89affa597c1fbc8e509ca299c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\"
:\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.462988 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.478936 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.495096 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cn
i/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.512488 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.526419 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.542477 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.542553 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.542573 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.542614 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.542629 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:47Z","lastTransitionTime":"2025-11-21T13:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.545518 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f
7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.564398 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.578420 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.594157 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.613616 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.627788 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.646029 4721 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.646088 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.646098 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.646111 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.646121 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:47Z","lastTransitionTime":"2025-11-21T13:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.749411 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.749471 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.749493 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.749519 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.749537 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:47Z","lastTransitionTime":"2025-11-21T13:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.818288 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-sr6gv"] Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.819167 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:36:47 crc kubenswrapper[4721]: E1121 13:36:47.819224 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.837833 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.853100 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.853137 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.853153 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.853095 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49gjj\" (UniqueName: \"kubernetes.io/projected/e613396d-3bd5-4a6f-8f9a-507f40954ee6-kube-api-access-49gjj\") pod \"network-metrics-daemon-sr6gv\" (UID: \"e613396d-3bd5-4a6f-8f9a-507f40954ee6\") " pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.853301 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs\") pod \"network-metrics-daemon-sr6gv\" (UID: \"e613396d-3bd5-4a6f-8f9a-507f40954ee6\") " pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.853171 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.853384 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:47Z","lastTransitionTime":"2025-11-21T13:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.863733 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://790e0789516a161559b418b36c74acfb6c3ea89a
ffa597c1fbc8e509ca299c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.880396 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.899658 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.916049 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.935769 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.955304 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs\") pod \"network-metrics-daemon-sr6gv\" (UID: \"e613396d-3bd5-4a6f-8f9a-507f40954ee6\") " pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.955482 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.955520 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.955527 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49gjj\" (UniqueName: \"kubernetes.io/projected/e613396d-3bd5-4a6f-8f9a-507f40954ee6-kube-api-access-49gjj\") pod \"network-metrics-daemon-sr6gv\" (UID: \"e613396d-3bd5-4a6f-8f9a-507f40954ee6\") " pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.955536 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.955600 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.955616 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:47Z","lastTransitionTime":"2025-11-21T13:36:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:47 crc kubenswrapper[4721]: E1121 13:36:47.956393 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 13:36:47 crc kubenswrapper[4721]: E1121 13:36:47.956653 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs podName:e613396d-3bd5-4a6f-8f9a-507f40954ee6 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:48.456622091 +0000 UTC m=+46.434513389 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs") pod "network-metrics-daemon-sr6gv" (UID: "e613396d-3bd5-4a6f-8f9a-507f40954ee6") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.956595 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.977168 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:47 crc kubenswrapper[4721]: I1121 13:36:47.985338 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49gjj\" (UniqueName: \"kubernetes.io/projected/e613396d-3bd5-4a6f-8f9a-507f40954ee6-kube-api-access-49gjj\") pod \"network-metrics-daemon-sr6gv\" (UID: \"e613396d-3bd5-4a6f-8f9a-507f40954ee6\") " pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.001418 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:47Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.019992 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
,{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.032856 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.044946 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z"
Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.057848 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.057899 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.057913 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.057933 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.057946 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:48Z","lastTransitionTime":"2025-11-21T13:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.059751 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.076400 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.091606 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{
\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.104347 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 
2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.159800 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.159835 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.159845 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.159858 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.159867 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:48Z","lastTransitionTime":"2025-11-21T13:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.262376 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.262418 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.262428 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.262445 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.262456 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:48Z","lastTransitionTime":"2025-11-21T13:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.363245 4721 generic.go:334] "Generic (PLEG): container finished" podID="553d849b-ea1f-436e-aa8f-190203f6ab6e" containerID="ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491" exitCode=0 Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.363371 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" event={"ID":"553d849b-ea1f-436e-aa8f-190203f6ab6e","Type":"ContainerDied","Data":"ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491"} Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.364801 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.364899 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.364927 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.364959 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.364982 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:48Z","lastTransitionTime":"2025-11-21T13:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.368148 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" event={"ID":"2fc06731-28f7-44c2-8132-c4e624f894fb","Type":"ContainerStarted","Data":"8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b"} Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.382391 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.403819 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.426499 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.445959 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.460752 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs\") pod \"network-metrics-daemon-sr6gv\" (UID: \"e613396d-3bd5-4a6f-8f9a-507f40954ee6\") " pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:36:48 crc kubenswrapper[4721]: E1121 13:36:48.461883 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 13:36:48 crc kubenswrapper[4721]: E1121 13:36:48.462039 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs podName:e613396d-3bd5-4a6f-8f9a-507f40954ee6 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:49.462020817 +0000 UTC m=+47.439912095 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs") pod "network-metrics-daemon-sr6gv" (UID: "e613396d-3bd5-4a6f-8f9a-507f40954ee6") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.470260 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.470797 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.470832 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.470855 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.470870 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:48Z","lastTransitionTime":"2025-11-21T13:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.471841 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.496759 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could 
not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.515192 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.532190 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.552092 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.564601 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.573546 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.573586 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.573595 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.573613 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.573625 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:48Z","lastTransitionTime":"2025-11-21T13:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.576451 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.589604 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.600502 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.619943 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.649260 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://790e0789516a161559b418b36c74acfb6c3ea89a
ffa597c1fbc8e509ca299c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.664559 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.676989 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.677056 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.677090 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.677117 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.677131 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:48Z","lastTransitionTime":"2025-11-21T13:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.679978 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.698135 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.717194 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.735861 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.754949 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},
\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.774012 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.781033 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.781098 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.781115 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.781137 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.781152 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:48Z","lastTransitionTime":"2025-11-21T13:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.793706 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.809111 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.825078 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.842200 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.872501 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://790e0789516a161559b418b36c74acfb6c3ea89a
ffa597c1fbc8e509ca299c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.883964 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.884025 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.884043 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.884101 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.884128 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:48Z","lastTransitionTime":"2025-11-21T13:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.893865 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.912992 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\
":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.926411 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.943475 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.959733 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:48Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.980997 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.981087 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.981281 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:36:48 crc kubenswrapper[4721]: E1121 13:36:48.981275 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:36:48 crc kubenswrapper[4721]: E1121 13:36:48.981436 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:36:48 crc kubenswrapper[4721]: E1121 13:36:48.981588 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.987426 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.987495 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.987520 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.987551 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:48 crc kubenswrapper[4721]: I1121 13:36:48.987576 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:48Z","lastTransitionTime":"2025-11-21T13:36:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.090939 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.090994 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.091003 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.091020 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.091039 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:49Z","lastTransitionTime":"2025-11-21T13:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.193623 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.193692 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.193748 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.193773 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.193789 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:49Z","lastTransitionTime":"2025-11-21T13:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.297336 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.297400 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.297413 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.297443 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.297462 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:49Z","lastTransitionTime":"2025-11-21T13:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.379469 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" event={"ID":"553d849b-ea1f-436e-aa8f-190203f6ab6e","Type":"ContainerStarted","Data":"2e6363bd3bda49acccd90227648d0b7b6a5b6a3b133e159939a5cd49d6b44132"} Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.399994 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.400115 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.400139 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.400166 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.400186 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:49Z","lastTransitionTime":"2025-11-21T13:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.471456 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs\") pod \"network-metrics-daemon-sr6gv\" (UID: \"e613396d-3bd5-4a6f-8f9a-507f40954ee6\") " pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:36:49 crc kubenswrapper[4721]: E1121 13:36:49.472039 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 13:36:49 crc kubenswrapper[4721]: E1121 13:36:49.472265 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs podName:e613396d-3bd5-4a6f-8f9a-507f40954ee6 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:51.472237811 +0000 UTC m=+49.450129079 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs") pod "network-metrics-daemon-sr6gv" (UID: "e613396d-3bd5-4a6f-8f9a-507f40954ee6") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.506901 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.506969 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.506991 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.507030 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.507049 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:49Z","lastTransitionTime":"2025-11-21T13:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.613839 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.614263 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.614334 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.614423 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.614488 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:49Z","lastTransitionTime":"2025-11-21T13:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.717962 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.718109 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.718137 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.718171 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.718193 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:49Z","lastTransitionTime":"2025-11-21T13:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.821662 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.821735 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.821757 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.821788 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.821809 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:49Z","lastTransitionTime":"2025-11-21T13:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.925481 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.925579 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.925604 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.925634 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.925654 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:49Z","lastTransitionTime":"2025-11-21T13:36:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:49 crc kubenswrapper[4721]: I1121 13:36:49.981591 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:36:49 crc kubenswrapper[4721]: E1121 13:36:49.981783 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.029294 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.029404 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.029431 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.029464 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.029488 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:50Z","lastTransitionTime":"2025-11-21T13:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.132692 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.132753 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.132767 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.132784 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.132796 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:50Z","lastTransitionTime":"2025-11-21T13:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.236113 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.236168 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.236186 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.236210 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.236226 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:50Z","lastTransitionTime":"2025-11-21T13:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.339033 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.339491 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.339673 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.339869 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.340055 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:50Z","lastTransitionTime":"2025-11-21T13:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.401603 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6363bd3bda49acccd90227648d0b7b6a5b6a3b133e159939a5cd49d6b44132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:50Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.413979 4721 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:50Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.427578 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:50Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.440169 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:50Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.442748 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.442789 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.442800 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.442814 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.442825 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:50Z","lastTransitionTime":"2025-11-21T13:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.452944 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:50Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.466503 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:50Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.478510 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:50Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.491691 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:50Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.504465 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:50Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.520915 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},
\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:50Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.532996 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:50Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.542195 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:50Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.544789 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.544835 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.544850 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.544869 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.544885 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:50Z","lastTransitionTime":"2025-11-21T13:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.556799 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:50Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.571024 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:50Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.590684 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://790e0789516a161559b418b36c74acfb6c3ea89a
ffa597c1fbc8e509ca299c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:50Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.603568 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:50Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.647888 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.647940 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.647962 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.647981 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.647995 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:50Z","lastTransitionTime":"2025-11-21T13:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.683659 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:36:50 crc kubenswrapper[4721]: E1121 13:36:50.683889 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:37:06.683856397 +0000 UTC m=+64.661747685 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.750643 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.750689 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.750697 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.750716 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.750727 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:50Z","lastTransitionTime":"2025-11-21T13:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.784820 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.784884 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.784925 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.784980 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:36:50 crc kubenswrapper[4721]: E1121 13:36:50.785022 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 13:36:50 crc kubenswrapper[4721]: E1121 13:36:50.785040 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 13:36:50 crc kubenswrapper[4721]: E1121 13:36:50.785087 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 13:36:50 crc kubenswrapper[4721]: E1121 13:36:50.785098 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:36:50 crc kubenswrapper[4721]: E1121 13:36:50.785116 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 13:37:06.785098317 +0000 UTC m=+64.762989575 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Nov 21 13:36:50 crc kubenswrapper[4721]: E1121 13:36:50.785139 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-21 13:37:06.785126157 +0000 UTC m=+64.763017415 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 21 13:36:50 crc kubenswrapper[4721]: E1121 13:36:50.785149 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Nov 21 13:36:50 crc kubenswrapper[4721]: E1121 13:36:50.785201 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Nov 21 13:36:50 crc kubenswrapper[4721]: E1121 13:36:50.785237 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Nov 21 13:36:50 crc kubenswrapper[4721]: E1121 13:36:50.785261 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 21 13:36:50 crc kubenswrapper[4721]: E1121 13:36:50.785244 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 13:37:06.78521724 +0000 UTC m=+64.763108538 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Nov 21 13:36:50 crc kubenswrapper[4721]: E1121 13:36:50.785363 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-21 13:37:06.785331553 +0000 UTC m=+64.763222851 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.853765 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.853816 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.853833 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.853854 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.853870 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:50Z","lastTransitionTime":"2025-11-21T13:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.957024 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.957098 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.957111 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.957130 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.957143 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:50Z","lastTransitionTime":"2025-11-21T13:36:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.980823 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.980891 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 13:36:50 crc kubenswrapper[4721]: I1121 13:36:50.980930 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 13:36:50 crc kubenswrapper[4721]: E1121 13:36:50.980978 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 13:36:50 crc kubenswrapper[4721]: E1121 13:36:50.981144 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 13:36:50 crc kubenswrapper[4721]: E1121 13:36:50.981247 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.059963 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.059995 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.060005 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.060022 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.060034 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:51Z","lastTransitionTime":"2025-11-21T13:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.162787 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.162851 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.162867 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.162890 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.162905 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:51Z","lastTransitionTime":"2025-11-21T13:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.265928 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.265984 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.266003 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.266030 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.266047 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:51Z","lastTransitionTime":"2025-11-21T13:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.368424 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.368480 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.368493 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.368512 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.368525 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:51Z","lastTransitionTime":"2025-11-21T13:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.471571 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.471638 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.471655 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.471680 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.471697 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:51Z","lastTransitionTime":"2025-11-21T13:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.493414 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs\") pod \"network-metrics-daemon-sr6gv\" (UID: \"e613396d-3bd5-4a6f-8f9a-507f40954ee6\") " pod="openshift-multus/network-metrics-daemon-sr6gv"
Nov 21 13:36:51 crc kubenswrapper[4721]: E1121 13:36:51.493682 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Nov 21 13:36:51 crc kubenswrapper[4721]: E1121 13:36:51.493862 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs podName:e613396d-3bd5-4a6f-8f9a-507f40954ee6 nodeName:}" failed. No retries permitted until 2025-11-21 13:36:55.493820683 +0000 UTC m=+53.471711991 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs") pod "network-metrics-daemon-sr6gv" (UID: "e613396d-3bd5-4a6f-8f9a-507f40954ee6") : object "openshift-multus"/"metrics-daemon-secret" not registered
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.574540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.574614 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.574633 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.574661 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.574682 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:51Z","lastTransitionTime":"2025-11-21T13:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.677412 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.677780 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.677798 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.677822 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.677839 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:51Z","lastTransitionTime":"2025-11-21T13:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.780826 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.780870 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.780880 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.780895 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.780905 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:51Z","lastTransitionTime":"2025-11-21T13:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.883913 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.883964 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.883977 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.883996 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.884009 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:51Z","lastTransitionTime":"2025-11-21T13:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.981683 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv"
Nov 21 13:36:51 crc kubenswrapper[4721]: E1121 13:36:51.981889 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.987413 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.987483 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.987501 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.987528 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:51 crc kubenswrapper[4721]: I1121 13:36:51.987547 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:51Z","lastTransitionTime":"2025-11-21T13:36:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.090530 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.090582 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.090596 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.090614 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.090624 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:52Z","lastTransitionTime":"2025-11-21T13:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.193233 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.193288 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.193305 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.193330 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.193347 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:52Z","lastTransitionTime":"2025-11-21T13:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.296472 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.296540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.296565 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.296592 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.296605 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:52Z","lastTransitionTime":"2025-11-21T13:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.319964 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.342778 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.372483 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://790e0789516a161559b418b36c74acfb6c3ea89affa597c1fbc8e509ca299c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.386363 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.391330 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovnkube-controller/0.log"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.394609 4721 generic.go:334] "Generic (PLEG): container finished" podID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerID="790e0789516a161559b418b36c74acfb6c3ea89affa597c1fbc8e509ca299c64" exitCode=1
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.394671 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerDied","Data":"790e0789516a161559b418b36c74acfb6c3ea89affa597c1fbc8e509ca299c64"}
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.395923 4721 scope.go:117] "RemoveContainer" containerID="790e0789516a161559b418b36c74acfb6c3ea89affa597c1fbc8e509ca299c64"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.399902 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.399945 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.399959 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.399979 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.399994 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:52Z","lastTransitionTime":"2025-11-21T13:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.404580 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.420573 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.444457 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.464866 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6363bd3bda49acccd90227648d0b7b6a5b6a3b133e159939a5cd49d6b44132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z"
Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.478352 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\
"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.496864 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\
\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.503670 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.503716 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.503737 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.503788 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.503811 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:52Z","lastTransitionTime":"2025-11-21T13:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.509798 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.530228 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.546674 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.564216 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.580645 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0
b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.598105 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.605862 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.606172 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.606267 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.606371 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.606460 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:52Z","lastTransitionTime":"2025-11-21T13:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.611694 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.628753 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.654151 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://790e0789516a161559b418b36c74acfb6c3ea89a
ffa597c1fbc8e509ca299c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790e0789516a161559b418b36c74acfb6c3ea89affa597c1fbc8e509ca299c64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:36:51Z\\\",\\\"message\\\":\\\"o:208] Removed *v1.Pod event handler 3\\\\nI1121 13:36:51.787755 6015 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1121 13:36:51.787367 6015 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1121 13:36:51.788345 6015 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1121 13:36:51.788500 6015 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1121 13:36:51.788592 6015 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1121 13:36:51.788982 6015 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1121 13:36:51.789020 6015 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1121 13:36:51.789126 6015 factory.go:656] Stopping watch factory\\\\nI1121 13:36:51.789164 6015 handler.go:208] Removed *v1.Node event handler 7\\\\nI1121 13:36:51.789178 6015 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.665524 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.681880 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.696284 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.709440 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.709475 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.709484 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.709498 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.709511 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:52Z","lastTransitionTime":"2025-11-21T13:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.709794 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.727197 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6363bd3bda49acccd90227648d0b7b6a5b6a3b133e159939a5cd49d6b44132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.739721 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.752965 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.767783 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.780570 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.792509 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.805798 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.812000 4721 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.812057 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.812091 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.812115 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.812129 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:52Z","lastTransitionTime":"2025-11-21T13:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.823161 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.837965 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.851397 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.914651 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.914698 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.914711 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.914728 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.914739 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:52Z","lastTransitionTime":"2025-11-21T13:36:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.981187 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.981272 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.981373 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:36:52 crc kubenswrapper[4721]: E1121 13:36:52.981594 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:36:52 crc kubenswrapper[4721]: E1121 13:36:52.981727 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:36:52 crc kubenswrapper[4721]: E1121 13:36:52.981947 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:36:52 crc kubenswrapper[4721]: I1121 13:36:52.995901 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:52Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.006161 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.016725 4721 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.016764 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.016773 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.016788 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.016796 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:53Z","lastTransitionTime":"2025-11-21T13:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.018575 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.040232 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"qua
y.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.062468 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.074422 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.091553 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.104248 4721 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.119111 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.119156 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.119165 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.119180 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.119190 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:53Z","lastTransitionTime":"2025-11-21T13:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.119520 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.154029 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://790e0789516a161559b418b36c74acfb6c3ea89affa597c1fbc8e509ca299c64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790e0789516a161559b418b36c74acfb6c3ea89affa597c1fbc8e509ca299c64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:36:51Z\\\",\\\"message\\\":\\\"o:208] Removed *v1.Pod event handler 3\\\\nI1121 13:36:51.787755 6015 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1121 13:36:51.787367 6015 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1121 13:36:51.788345 6015 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1121 13:36:51.788500 6015 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1121 13:36:51.788592 6015 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1121 13:36:51.788982 6015 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1121 13:36:51.789020 6015 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1121 13:36:51.789126 6015 factory.go:656] Stopping watch factory\\\\nI1121 13:36:51.789164 6015 handler.go:208] Removed *v1.Node event handler 7\\\\nI1121 13:36:51.789178 6015 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.165826 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.177874 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.191292 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.207032 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.219193 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.221550 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.221595 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.221610 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.221630 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.221642 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:53Z","lastTransitionTime":"2025-11-21T13:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.237514 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6363bd3bda49acccd90227648d0b7b6a5b6a3b133e159939a5cd49d6b44132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.325515 4721 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.325842 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.325924 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.325997 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.326077 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:53Z","lastTransitionTime":"2025-11-21T13:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.399975 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovnkube-controller/1.log" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.400525 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovnkube-controller/0.log" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.402773 4721 generic.go:334] "Generic (PLEG): container finished" podID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerID="ccefa2fffb83a7e5ce126b21a398d3d83b9e85b93cbd6379be3e3e95303f2400" exitCode=1 Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.402820 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerDied","Data":"ccefa2fffb83a7e5ce126b21a398d3d83b9e85b93cbd6379be3e3e95303f2400"} Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.402873 4721 scope.go:117] "RemoveContainer" containerID="790e0789516a161559b418b36c74acfb6c3ea89affa597c1fbc8e509ca299c64" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.403769 4721 scope.go:117] "RemoveContainer" containerID="ccefa2fffb83a7e5ce126b21a398d3d83b9e85b93cbd6379be3e3e95303f2400" Nov 21 13:36:53 crc kubenswrapper[4721]: E1121 13:36:53.404167 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-x6mh9_openshift-ovn-kubernetes(7db08ddd-c14b-40b0-a2fd-67f84d6e747a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.427368 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.429233 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.429262 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.429271 4721 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.429291 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.429316 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:53Z","lastTransitionTime":"2025-11-21T13:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.443791 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.459985 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.474997 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.500242 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccefa2fffb83a7e5ce126b21a398d3d83b9e85b9
3cbd6379be3e3e95303f2400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790e0789516a161559b418b36c74acfb6c3ea89affa597c1fbc8e509ca299c64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:36:51Z\\\",\\\"message\\\":\\\"o:208] Removed *v1.Pod event handler 3\\\\nI1121 13:36:51.787755 6015 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1121 13:36:51.787367 6015 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1121 13:36:51.788345 6015 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1121 13:36:51.788500 6015 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1121 13:36:51.788592 6015 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1121 13:36:51.788982 6015 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1121 13:36:51.789020 6015 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1121 13:36:51.789126 6015 factory.go:656] Stopping watch factory\\\\nI1121 13:36:51.789164 6015 handler.go:208] Removed *v1.Node event handler 7\\\\nI1121 13:36:51.789178 6015 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccefa2fffb83a7e5ce126b21a398d3d83b9e85b93cbd6379be3e3e95303f2400\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:36:53Z\\\",\\\"message\\\":\\\"rk=default : 1.252052ms\\\\nI1121 13:36:53.324221 6274 services_controller.go:356] Processing sync for service openshift-operator-lifecycle-manager/package-server-manager-metrics for network=default\\\\nI1121 13:36:53.324140 6274 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1121 13:36:53.324284 6274 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]} options:{GoMap:map[iface-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:36:53.324322 6274 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == 
{7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1121 13:36:53.324338 6274 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]
,\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.515870 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.530840 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6363bd3bda49acccd90227648d0b7b6a5b6a3b133e159939a5cd49d6b44132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.532303 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.532342 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.532351 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.532367 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.532377 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:53Z","lastTransitionTime":"2025-11-21T13:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.540765 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.558467 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.572893 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.589033 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.608485 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.623944 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.634908 4721 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.634951 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.634963 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.634988 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.635001 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:53Z","lastTransitionTime":"2025-11-21T13:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.637575 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.653234 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"qua
y.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.672159 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:53Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.738034 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.738288 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.738383 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.738452 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.738506 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:53Z","lastTransitionTime":"2025-11-21T13:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.842110 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.842176 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.842196 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.842221 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.842238 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:53Z","lastTransitionTime":"2025-11-21T13:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.946467 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.946539 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.946556 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.946582 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.946602 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:53Z","lastTransitionTime":"2025-11-21T13:36:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:53 crc kubenswrapper[4721]: I1121 13:36:53.981532 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv"
Nov 21 13:36:53 crc kubenswrapper[4721]: E1121 13:36:53.981680 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.049856 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.049914 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.049956 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.049986 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.050005 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:54Z","lastTransitionTime":"2025-11-21T13:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.153323 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.153397 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.153415 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.153442 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.153463 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:54Z","lastTransitionTime":"2025-11-21T13:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.256650 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.256719 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.256741 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.256769 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.256792 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:54Z","lastTransitionTime":"2025-11-21T13:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.359912 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.359959 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.359972 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.359990 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.360003 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:54Z","lastTransitionTime":"2025-11-21T13:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
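Every Ready=False heartbeat above carries the same root cause: the kubelet finds no CNI configuration file in /etc/kubernetes/cni/net.d/. A minimal Go sketch of the equivalent on-node check an operator might run; the directory comes straight from the log message, while the extension filter is an assumption about conventional CNI file names, not the kubelet's exact loader logic:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// confDir is the directory the log complains about; adjust it if your
// --cni-conf-dir is configured differently.
const confDir = "/etc/kubernetes/cni/net.d"

func main() {
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Printf("cannot read %s: %v\n", confDir, err)
		os.Exit(1)
	}
	var found []string
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		// CNI config loaders conventionally consider these extensions.
		switch strings.ToLower(filepath.Ext(e.Name())) {
		case ".conf", ".conflist", ".json":
			found = append(found, e.Name())
		}
	}
	if len(found) == 0 {
		fmt.Println("no CNI configuration files found; NetworkReady will stay false")
		os.Exit(1)
	}
	fmt.Printf("CNI configuration present: %v\n", found)
}
```

If the directory is empty, as here, the condition will keep flapping until the network operator (OVN-Kubernetes in this cluster) writes its config file.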
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.408384 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovnkube-controller/1.log"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.462859 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.462921 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.462935 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.462954 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.462967 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:54Z","lastTransitionTime":"2025-11-21T13:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.566466 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.566523 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.566541 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.566566 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.566598 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:54Z","lastTransitionTime":"2025-11-21T13:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.669820 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.669874 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.669908 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.669936 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.669957 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:54Z","lastTransitionTime":"2025-11-21T13:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.772674 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.772738 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.772757 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.772782 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.772799 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:54Z","lastTransitionTime":"2025-11-21T13:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.876576 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.876653 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.876679 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.876745 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.876773 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:54Z","lastTransitionTime":"2025-11-21T13:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.980224 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.980287 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.980302 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.980321 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.980334 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:54Z","lastTransitionTime":"2025-11-21T13:36:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.980654 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.980713 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 13:36:54 crc kubenswrapper[4721]: I1121 13:36:54.980807 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 13:36:54 crc kubenswrapper[4721]: E1121 13:36:54.980809 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 13:36:54 crc kubenswrapper[4721]: E1121 13:36:54.980908 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 13:36:54 crc kubenswrapper[4721]: E1121 13:36:54.980978 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 13:36:55 crc kubenswrapper[4721]: I1121 13:36:55.083493 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:55 crc kubenswrapper[4721]: I1121 13:36:55.083562 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:55 crc kubenswrapper[4721]: I1121 13:36:55.083577 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:55 crc kubenswrapper[4721]: I1121 13:36:55.083602 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:55 crc kubenswrapper[4721]: I1121 13:36:55.083619 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:55Z","lastTransitionTime":"2025-11-21T13:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:55 crc kubenswrapper[4721]: I1121 13:36:55.187290 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:55 crc kubenswrapper[4721]: I1121 13:36:55.187374 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:55 crc kubenswrapper[4721]: I1121 13:36:55.187397 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:55 crc kubenswrapper[4721]: I1121 13:36:55.187868 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:55 crc kubenswrapper[4721]: I1121 13:36:55.188231 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:55Z","lastTransitionTime":"2025-11-21T13:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:55 crc kubenswrapper[4721]: I1121 13:36:55.291726 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:55 crc kubenswrapper[4721]: I1121 13:36:55.291781 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:55 crc kubenswrapper[4721]: I1121 13:36:55.291790 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:55 crc kubenswrapper[4721]: I1121 13:36:55.291808 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:55 crc kubenswrapper[4721]: I1121 13:36:55.291851 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:55Z","lastTransitionTime":"2025-11-21T13:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:55 crc kubenswrapper[4721]: I1121 13:36:55.395198 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:55 crc kubenswrapper[4721]: I1121 13:36:55.395261 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:55 crc kubenswrapper[4721]: I1121 13:36:55.395285 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:55 crc kubenswrapper[4721]: I1121 13:36:55.395314 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:55 crc kubenswrapper[4721]: I1121 13:36:55.395337 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:55Z","lastTransitionTime":"2025-11-21T13:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.497925 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.497966 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.497983 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.498008 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.498028 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:55Z","lastTransitionTime":"2025-11-21T13:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.549788 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs\") pod \"network-metrics-daemon-sr6gv\" (UID: \"e613396d-3bd5-4a6f-8f9a-507f40954ee6\") " pod="openshift-multus/network-metrics-daemon-sr6gv"
Nov 21 13:36:56 crc kubenswrapper[4721]: E1121 13:36:55.549966 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Nov 21 13:36:56 crc kubenswrapper[4721]: E1121 13:36:55.550049 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs podName:e613396d-3bd5-4a6f-8f9a-507f40954ee6 nodeName:}" failed. No retries permitted until 2025-11-21 13:37:03.550019466 +0000 UTC m=+61.527910754 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs") pod "network-metrics-daemon-sr6gv" (UID: "e613396d-3bd5-4a6f-8f9a-507f40954ee6") : object "openshift-multus"/"metrics-daemon-secret" not registered
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.601021 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.601143 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.601171 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.601200 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.601223 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:55Z","lastTransitionTime":"2025-11-21T13:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
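The nestedpendingoperations entry above refuses further retries for 8s, which is the volume manager's exponential backoff between failed MountVolume.SetUp attempts: each failure doubles the wait before the next try. A small sketch of that policy; the initial delay, doubling factor, and cap here are illustrative constants under that assumption, not the kubelet's exact values:

```go
package main

import (
	"fmt"
	"time"
)

// backoff doubles the retry delay after every failure, up to a cap, which is
// the shape of the "durationBeforeRetry" values in the log (2s, 4s, 8s, ...).
type backoff struct {
	initial, max, current time.Duration
}

func (b *backoff) next() time.Duration {
	if b.current == 0 {
		b.current = b.initial
	} else {
		b.current *= 2
		if b.current > b.max {
			b.current = b.max
		}
	}
	return b.current
}

func main() {
	b := backoff{initial: 2 * time.Second, max: 2 * time.Minute}
	lastFailure := time.Now()
	for i := 0; i < 5; i++ {
		d := b.next()
		fmt.Printf("attempt %d failed; no retries permitted until %s (durationBeforeRetry %s)\n",
			i+1, lastFailure.Add(d).Format(time.RFC3339), d)
		lastFailure = lastFailure.Add(d)
	}
}
```

Under this policy the mount keeps failing quietly until the referenced secret ("openshift-multus"/"metrics-daemon-secret") is registered with the kubelet, at which point the next scheduled retry succeeds.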
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.704394 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.704475 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.704486 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.704512 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.704525 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:55Z","lastTransitionTime":"2025-11-21T13:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.807994 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.808040 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.808052 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.808093 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.808107 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:55Z","lastTransitionTime":"2025-11-21T13:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.911438 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.911509 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.911535 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.911570 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.911593 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:55Z","lastTransitionTime":"2025-11-21T13:36:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:55.981262 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv"
Nov 21 13:36:56 crc kubenswrapper[4721]: E1121 13:36:55.981714 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.014681 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.014734 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.014750 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.014776 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.014793 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:56Z","lastTransitionTime":"2025-11-21T13:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.118979 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.119532 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.119553 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.119581 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.119604 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:56Z","lastTransitionTime":"2025-11-21T13:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.148518 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.148544 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.148552 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.148565 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.148575 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:56Z","lastTransitionTime":"2025-11-21T13:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:56 crc kubenswrapper[4721]: E1121 13:36:56.170115 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:56Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.176420 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.176562 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.176629 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.176709 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.176785 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:56Z","lastTransitionTime":"2025-11-21T13:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:56 crc kubenswrapper[4721]: E1121 13:36:56.196938 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:56Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.202283 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.202324 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.202337 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.202361 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.202376 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:56Z","lastTransitionTime":"2025-11-21T13:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:56 crc kubenswrapper[4721]: E1121 13:36:56.222839 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:56Z is after 2025-08-24T17:21:41Z"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.229696 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.229816 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.229874 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.229943 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.230007 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:56Z","lastTransitionTime":"2025-11-21T13:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:56 crc kubenswrapper[4721]: E1121 13:36:56.256883 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:56Z is after 2025-08-24T17:21:41Z"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.265147 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.265509 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.265601 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.265681 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.265766 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:56Z","lastTransitionTime":"2025-11-21T13:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:56 crc kubenswrapper[4721]: E1121 13:36:56.310563 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:56Z is after 2025-08-24T17:21:41Z"
Nov 21 13:36:56 crc kubenswrapper[4721]: E1121 13:36:56.310723 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.312264 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.312300 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.312315 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.312336 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.312350 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:56Z","lastTransitionTime":"2025-11-21T13:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.414647 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.414681 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.414693 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.414711 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.414723 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:56Z","lastTransitionTime":"2025-11-21T13:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.518133 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.518197 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.518214 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.518240 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.518258 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:56Z","lastTransitionTime":"2025-11-21T13:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.621714 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.622133 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.622301 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.622441 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.622570 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:56Z","lastTransitionTime":"2025-11-21T13:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.725314 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.725367 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.725384 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.725406 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.725423 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:56Z","lastTransitionTime":"2025-11-21T13:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.752204 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.763389 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"]
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.768555 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:56Z is after 2025-08-24T17:21:41Z"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.791101 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:56Z is after 2025-08-24T17:21:41Z"
Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.821806 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccefa2fffb83a7e5ce126b21a398d3d83b9e85b93cbd6379be3e3e95303f2400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790e0789516a161559b418b36c74acfb6c3ea89affa597c1fbc8e509ca299c64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:36:51Z\\\",\\\"message\\\":\\\"o:208] Removed *v1.Pod event handler 3\\\\nI1121 13:36:51.787755 6015 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1121 13:36:51.787367 6015 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1121 13:36:51.788345 6015 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1121 13:36:51.788500 6015 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1121 13:36:51.788592 6015 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1121 13:36:51.788982 6015 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1121 13:36:51.789020 6015 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1121 13:36:51.789126 6015 factory.go:656] Stopping watch factory\\\\nI1121 13:36:51.789164 6015 handler.go:208] Removed *v1.Node event handler 7\\\\nI1121 13:36:51.789178 6015 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccefa2fffb83a7e5ce126b21a398d3d83b9e85b93cbd6379be3e3e95303f2400\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:36:53Z\\\",\\\"message\\\":\\\"rk=default : 1.252052ms\\\\nI1121 13:36:53.324221 6274 services_controller.go:356] Processing sync for service openshift-operator-lifecycle-manager/package-server-manager-metrics for network=default\\\\nI1121 13:36:53.324140 6274 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1121 13:36:53.324284 6274 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]} options:{GoMap:map[iface-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:36:53.324322 6274 model_client.go:398] Mutate operations generated as: 
[{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1121 13:36:53.324338 6274 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env
\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:56Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.827685 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.827731 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.827743 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.827762 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.827774 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:56Z","lastTransitionTime":"2025-11-21T13:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.842946 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:56Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.866419 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6363bd3bda49acccd90227648d0b7b6a5b6a3b133e159939a5cd49d6b44132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:56Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.883795 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:56Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.903620 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:56Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.920240 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:56Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.930934 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.931000 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.931018 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.931047 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.931101 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:56Z","lastTransitionTime":"2025-11-21T13:36:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.938456 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:56Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.956136 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:56Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.973617 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:56Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.981613 4721 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.981652 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:56 crc kubenswrapper[4721]: E1121 13:36:56.981780 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.981875 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:36:56 crc kubenswrapper[4721]: E1121 13:36:56.982109 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:36:56 crc kubenswrapper[4721]: E1121 13:36:56.982294 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:36:56 crc kubenswrapper[4721]: I1121 13:36:56.991502 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:56Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.011387 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastStat
e\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:57Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.028529 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:57Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.033877 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.033915 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.033930 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.033951 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.033967 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:57Z","lastTransitionTime":"2025-11-21T13:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.043261 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:57Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.054814 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:36:57Z is after 2025-08-24T17:21:41Z" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.136876 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.136940 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.136959 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.136984 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.137000 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:57Z","lastTransitionTime":"2025-11-21T13:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.240339 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.240412 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.240435 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.240462 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.240479 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:57Z","lastTransitionTime":"2025-11-21T13:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.343997 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.344096 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.344115 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.344140 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.344159 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:57Z","lastTransitionTime":"2025-11-21T13:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.447094 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.447141 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.447178 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.447198 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.447211 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:57Z","lastTransitionTime":"2025-11-21T13:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.550304 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.550365 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.550375 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.550401 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.550413 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:57Z","lastTransitionTime":"2025-11-21T13:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.654021 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.654148 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.654204 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.654241 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.654266 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:57Z","lastTransitionTime":"2025-11-21T13:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.758130 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.758248 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.758263 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.758285 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.758299 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:57Z","lastTransitionTime":"2025-11-21T13:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.861526 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.861682 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.861705 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.861774 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.861796 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:57Z","lastTransitionTime":"2025-11-21T13:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.964212 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.964273 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.964290 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.964314 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.964330 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:57Z","lastTransitionTime":"2025-11-21T13:36:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:57 crc kubenswrapper[4721]: I1121 13:36:57.981409 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:36:57 crc kubenswrapper[4721]: E1121 13:36:57.981548 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.067225 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.067299 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.067317 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.067345 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.067363 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:58Z","lastTransitionTime":"2025-11-21T13:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.170607 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.170674 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.170691 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.170717 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.170734 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:58Z","lastTransitionTime":"2025-11-21T13:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.274174 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.274238 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.274261 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.274290 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.274312 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:58Z","lastTransitionTime":"2025-11-21T13:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.377546 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.377580 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.377591 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.377608 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.377623 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:58Z","lastTransitionTime":"2025-11-21T13:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.480083 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.480147 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.480161 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.480183 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.480197 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:58Z","lastTransitionTime":"2025-11-21T13:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.583003 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.583101 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.583118 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.583140 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.583155 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:58Z","lastTransitionTime":"2025-11-21T13:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.686038 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.686479 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.686619 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.686775 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.686907 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:58Z","lastTransitionTime":"2025-11-21T13:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.789712 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.789750 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.789759 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.789775 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.789786 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:58Z","lastTransitionTime":"2025-11-21T13:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.892572 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.892629 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.892649 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.892672 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.892694 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:58Z","lastTransitionTime":"2025-11-21T13:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.981229 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.981263 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.981371 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:36:58 crc kubenswrapper[4721]: E1121 13:36:58.981533 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:36:58 crc kubenswrapper[4721]: E1121 13:36:58.982248 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:36:58 crc kubenswrapper[4721]: E1121 13:36:58.982363 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.995671 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.995990 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.996117 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.996234 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:58 crc kubenswrapper[4721]: I1121 13:36:58.996343 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:58Z","lastTransitionTime":"2025-11-21T13:36:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.098824 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.098888 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.098911 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.098985 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.099003 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:59Z","lastTransitionTime":"2025-11-21T13:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.201590 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.201650 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.201663 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.201683 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.201697 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:59Z","lastTransitionTime":"2025-11-21T13:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.304836 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.304931 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.304948 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.304972 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.304987 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:59Z","lastTransitionTime":"2025-11-21T13:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.409264 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.409355 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.409383 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.409417 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.409455 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:59Z","lastTransitionTime":"2025-11-21T13:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.512936 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.512970 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.512982 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.512999 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.513012 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:59Z","lastTransitionTime":"2025-11-21T13:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.615929 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.616197 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.616210 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.616228 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.616241 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:59Z","lastTransitionTime":"2025-11-21T13:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.718822 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.718875 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.718893 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.718919 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.718934 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:59Z","lastTransitionTime":"2025-11-21T13:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.821287 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.821358 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.821379 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.821407 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.821432 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:59Z","lastTransitionTime":"2025-11-21T13:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.924595 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.924652 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.924668 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.924692 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.924708 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:36:59Z","lastTransitionTime":"2025-11-21T13:36:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:36:59 crc kubenswrapper[4721]: I1121 13:36:59.980864 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:36:59 crc kubenswrapper[4721]: E1121 13:36:59.981340 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.027858 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.027904 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.027917 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.027937 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.027950 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:00Z","lastTransitionTime":"2025-11-21T13:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.130423 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.130489 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.130507 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.130537 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.130554 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:00Z","lastTransitionTime":"2025-11-21T13:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.233220 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.233287 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.233304 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.233333 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.233353 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:00Z","lastTransitionTime":"2025-11-21T13:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.335964 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.336406 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.336565 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.336719 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.336845 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:00Z","lastTransitionTime":"2025-11-21T13:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.439579 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.440020 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.440208 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.440384 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.440585 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:00Z","lastTransitionTime":"2025-11-21T13:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.544251 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.544318 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.544338 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.544363 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.544380 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:00Z","lastTransitionTime":"2025-11-21T13:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.648196 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.648256 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.648282 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.648308 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.648326 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:00Z","lastTransitionTime":"2025-11-21T13:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.751425 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.751471 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.751487 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.751512 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.751530 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:00Z","lastTransitionTime":"2025-11-21T13:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.854771 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.854831 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.854850 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.854876 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.854894 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:00Z","lastTransitionTime":"2025-11-21T13:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.958382 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.958432 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.958448 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.958467 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.958480 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:00Z","lastTransitionTime":"2025-11-21T13:37:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.981278 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:00 crc kubenswrapper[4721]: E1121 13:37:00.981597 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.981338 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:00 crc kubenswrapper[4721]: E1121 13:37:00.981941 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:00 crc kubenswrapper[4721]: I1121 13:37:00.981280 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:00 crc kubenswrapper[4721]: E1121 13:37:00.982345 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.061330 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.061399 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.061418 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.061447 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.061467 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:01Z","lastTransitionTime":"2025-11-21T13:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.166483 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.166541 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.166559 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.166586 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.166603 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:01Z","lastTransitionTime":"2025-11-21T13:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.269343 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.270340 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.270403 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.270441 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.270465 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:01Z","lastTransitionTime":"2025-11-21T13:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.373169 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.373226 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.373239 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.373265 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.373283 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:01Z","lastTransitionTime":"2025-11-21T13:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.476766 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.476930 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.476949 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.476974 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.476995 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:01Z","lastTransitionTime":"2025-11-21T13:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.579760 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.579821 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.579834 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.579853 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.579865 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:01Z","lastTransitionTime":"2025-11-21T13:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.682461 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.682538 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.682559 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.682594 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.682614 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:01Z","lastTransitionTime":"2025-11-21T13:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.785845 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.785922 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.785937 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.785959 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.785974 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:01Z","lastTransitionTime":"2025-11-21T13:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.889052 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.889136 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.889154 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.889180 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.889202 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:01Z","lastTransitionTime":"2025-11-21T13:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.980870 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:01 crc kubenswrapper[4721]: E1121 13:37:01.981119 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.992428 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.992532 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.992553 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.992586 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:01 crc kubenswrapper[4721]: I1121 13:37:01.992609 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:01Z","lastTransitionTime":"2025-11-21T13:37:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.096764 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.096835 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.096858 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.096883 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.096898 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:02Z","lastTransitionTime":"2025-11-21T13:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.201058 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.201158 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.201179 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.201207 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.201225 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:02Z","lastTransitionTime":"2025-11-21T13:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.305000 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.305098 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.305118 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.305142 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.305160 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:02Z","lastTransitionTime":"2025-11-21T13:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.409006 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.409101 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.409116 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.409143 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.409157 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:02Z","lastTransitionTime":"2025-11-21T13:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.512543 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.512597 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.512609 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.512633 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.512646 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:02Z","lastTransitionTime":"2025-11-21T13:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.627629 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.627703 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.627717 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.628025 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.628056 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:02Z","lastTransitionTime":"2025-11-21T13:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.732137 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.732215 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.732233 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.732768 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.732825 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:02Z","lastTransitionTime":"2025-11-21T13:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.836168 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.836255 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.836281 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.836317 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.836341 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:02Z","lastTransitionTime":"2025-11-21T13:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.939591 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.939658 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.939676 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.939704 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.939726 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:02Z","lastTransitionTime":"2025-11-21T13:37:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.981590 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.981673 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:02 crc kubenswrapper[4721]: E1121 13:37:02.981816 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:02 crc kubenswrapper[4721]: I1121 13:37:02.981870 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 13:37:02 crc kubenswrapper[4721]: E1121 13:37:02.982133 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 13:37:02 crc kubenswrapper[4721]: E1121 13:37:02.982367 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.006693 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:03Z is after 2025-08-24T17:21:41Z"
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.032961 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:03Z is after 2025-08-24T17:21:41Z"
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.042775 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:03Z","lastTransitionTime":"2025-11-21T13:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.056329 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:03Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.078746 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6363bd3bda49acccd90227648d0b7b6a5b6a3b133e159939a5cd49d6b44132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:03Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.095936 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:03Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.120683 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:03Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.146658 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.146730 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.146753 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.146791 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.146817 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:03Z","lastTransitionTime":"2025-11-21T13:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.150635 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:03Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.172359 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4b30e0e-0193-4468-8697-d77352d6b1c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd8a7ea0eb3f0e2449376f55f6277cbf250e1fe9b12334ae10863a67403e1964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24fa51f85d2dad25dafaa1bc01b6fa4d04e38200dceb5a57f28b5fcac97dc2e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31bd83ed26e48907b284aba52462ec0e17a41af0ab24b310046c41d73234689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:03Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.191499 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:03Z is after 2025-08-24T17:21:41Z" Nov 21 
13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.212688 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:03Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.232936 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:03Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.251368 4721 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.251435 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.251452 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.251478 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.251501 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:03Z","lastTransitionTime":"2025-11-21T13:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.256672 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:03Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.277620 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:03Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.295488 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:03Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.316648 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:03Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.347170 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccefa2fffb83a7e5ce126b21a398d3d83b9e85b93cbd6379be3e3e95303f2400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://790e0789516a161559b418b36c74acfb6c3ea89affa597c1fbc8e509ca299c64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:36:51Z\\\",\\\"message\\\":\\\"o:208] Removed *v1.Pod event handler 3\\\\nI1121 13:36:51.787755 6015 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1121 13:36:51.787367 6015 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1121 13:36:51.788345 6015 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1121 13:36:51.788500 6015 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1121 13:36:51.788592 6015 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1121 13:36:51.788982 6015 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1121 13:36:51.789020 6015 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1121 13:36:51.789126 6015 factory.go:656] Stopping watch factory\\\\nI1121 13:36:51.789164 6015 handler.go:208] Removed *v1.Node event handler 7\\\\nI1121 13:36:51.789178 6015 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccefa2fffb83a7e5ce126b21a398d3d83b9e85b93cbd6379be3e3e95303f2400\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:36:53Z\\\",\\\"message\\\":\\\"rk=default : 1.252052ms\\\\nI1121 13:36:53.324221 6274 services_controller.go:356] Processing sync for service openshift-operator-lifecycle-manager/package-server-manager-metrics for network=default\\\\nI1121 13:36:53.324140 6274 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1121 13:36:53.324284 6274 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]} options:{GoMap:map[iface-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:36:53.324322 6274 model_client.go:398] Mutate operations generated as: 
[{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1121 13:36:53.324338 6274 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env
\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:03Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.354493 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.354585 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.354613 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.354647 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.354673 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:03Z","lastTransitionTime":"2025-11-21T13:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.365360 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:03Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.457017 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.457108 4721 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.457123 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.457155 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.457172 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:03Z","lastTransitionTime":"2025-11-21T13:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.561372 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.561458 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.561481 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.561519 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.561542 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:03Z","lastTransitionTime":"2025-11-21T13:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.648635 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs\") pod \"network-metrics-daemon-sr6gv\" (UID: \"e613396d-3bd5-4a6f-8f9a-507f40954ee6\") " pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:03 crc kubenswrapper[4721]: E1121 13:37:03.648980 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 13:37:03 crc kubenswrapper[4721]: E1121 13:37:03.649173 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs podName:e613396d-3bd5-4a6f-8f9a-507f40954ee6 nodeName:}" failed. No retries permitted until 2025-11-21 13:37:19.649139834 +0000 UTC m=+77.627031132 (durationBeforeRetry 16s). 
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.663934 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.664000 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.664013 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.664038 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.664054 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:03Z","lastTransitionTime":"2025-11-21T13:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.767883 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.767944 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.767954 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.767976 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.767995 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:03Z","lastTransitionTime":"2025-11-21T13:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.871212 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.871275 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.871285 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.871307 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.871319 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:03Z","lastTransitionTime":"2025-11-21T13:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.974865 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.974926 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.974938 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.974962 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.974977 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:03Z","lastTransitionTime":"2025-11-21T13:37:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:03 crc kubenswrapper[4721]: I1121 13:37:03.981476 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv"
Nov 21 13:37:03 crc kubenswrapper[4721]: E1121 13:37:03.981694 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6"
Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.078116 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.078184 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.078207 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.078240 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.078265 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:04Z","lastTransitionTime":"2025-11-21T13:37:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.182150 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.182619 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.182632 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.182651 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.182663 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:04Z","lastTransitionTime":"2025-11-21T13:37:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.286150 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.286236 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.286253 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.286278 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.286296 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:04Z","lastTransitionTime":"2025-11-21T13:37:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.389551 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.389608 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.389621 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.389641 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.389659 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:04Z","lastTransitionTime":"2025-11-21T13:37:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.493024 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.493136 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.493163 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.493193 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.493215 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:04Z","lastTransitionTime":"2025-11-21T13:37:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.596540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.596594 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.596610 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.596635 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.596652 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:04Z","lastTransitionTime":"2025-11-21T13:37:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.699861 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.699910 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.699921 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.699939 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.699950 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:04Z","lastTransitionTime":"2025-11-21T13:37:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.802499 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.802545 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.802558 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.802576 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.802589 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:04Z","lastTransitionTime":"2025-11-21T13:37:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.905320 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.905375 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.905394 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.905418 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.905438 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:04Z","lastTransitionTime":"2025-11-21T13:37:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.981034 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.981134 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:04 crc kubenswrapper[4721]: I1121 13:37:04.981039 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:04 crc kubenswrapper[4721]: E1121 13:37:04.981291 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:04 crc kubenswrapper[4721]: E1121 13:37:04.981427 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:04 crc kubenswrapper[4721]: E1121 13:37:04.981589 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.009207 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.009284 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.009309 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.009338 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.009359 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:05Z","lastTransitionTime":"2025-11-21T13:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.113815 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.113898 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.113922 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.113985 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.114024 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:05Z","lastTransitionTime":"2025-11-21T13:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.218160 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.218222 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.218233 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.218255 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.218269 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:05Z","lastTransitionTime":"2025-11-21T13:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.317920 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.319550 4721 scope.go:117] "RemoveContainer" containerID="ccefa2fffb83a7e5ce126b21a398d3d83b9e85b93cbd6379be3e3e95303f2400" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.320730 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.320832 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.320856 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.320886 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.320911 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:05Z","lastTransitionTime":"2025-11-21T13:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.344087 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:05Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.365026 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4b30e0e-0193-4468-8697-d77352d6b1c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd8a7ea0eb3f0e2449376f55f6277cbf250e1fe9b12334ae10863a67403e1964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24fa51f85d2dad25dafaa1bc01b6fa4d04e38200dceb5a57f28b5fcac97dc2e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31bd83ed26e48907b284aba52462ec0e17a41af0ab24b310046c41d73234689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:05Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.386690 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:05Z is after 2025-08-24T17:21:41Z" Nov 21 
Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.405002 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:05Z is after 2025-08-24T17:21:41Z"
Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.424298 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.424371 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.424393 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.424424 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.424446 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:05Z","lastTransitionTime":"2025-11-21T13:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.424901 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:05Z is after 2025-08-24T17:21:41Z"
Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.444493 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:05Z is after 2025-08-24T17:21:41Z"
"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:05Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.464962 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kub
e-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:05Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.484390 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:05Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.500905 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:05Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.527781 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.527834 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.527847 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.527868 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.527883 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:05Z","lastTransitionTime":"2025-11-21T13:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.529937 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ccefa2fffb83a7e5ce126b21a398d3d83b9e85b93cbd6379be3e3e95303f2400\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccefa2fffb83a7e5ce126b21a398d3d83b9e85b93cbd6379be3e3e95303f2400\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:36:53Z\\\",\\\"message\\\":\\\"rk=default : 1.252052ms\\\\nI1121 13:36:53.324221 6274 services_controller.go:356] Processing sync for service openshift-operator-lifecycle-manager/package-server-manager-metrics for network=default\\\\nI1121 13:36:53.324140 6274 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1121 13:36:53.324284 6274 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]} options:{GoMap:map[iface-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:36:53.324322 6274 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1121 13:36:53.324338 6274 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-x6mh9_openshift-ovn-kubernetes(7db08ddd-c14b-40b0-a2fd-67f84d6e747a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:05Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.546853 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:05Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.563104 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:05Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.581364 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:05Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.597733 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\
\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:05Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.628273 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6363bd3bda49acccd90227648d0b7b6a5b6a3b133e159939a5cd49d6b44132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8
ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint
\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":fals
e,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:05Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.630162 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.630208 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.630220 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.630241 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.630254 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:05Z","lastTransitionTime":"2025-11-21T13:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.641788 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:05Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.658297 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:05Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.733365 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.733401 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.733410 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.733426 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.733435 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:05Z","lastTransitionTime":"2025-11-21T13:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.841696 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.841788 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.841829 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.841863 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.841882 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:05Z","lastTransitionTime":"2025-11-21T13:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.945197 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.945267 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.945290 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.945323 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.945345 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:05Z","lastTransitionTime":"2025-11-21T13:37:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:05 crc kubenswrapper[4721]: I1121 13:37:05.981165 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:05 crc kubenswrapper[4721]: E1121 13:37:05.981341 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.048729 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.048792 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.048809 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.048834 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.048852 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:06Z","lastTransitionTime":"2025-11-21T13:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.151855 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.151905 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.151916 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.151933 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.151946 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:06Z","lastTransitionTime":"2025-11-21T13:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.256473 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.256554 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.256579 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.256609 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.256631 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:06Z","lastTransitionTime":"2025-11-21T13:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.359042 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.359096 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.359107 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.359123 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.359134 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:06Z","lastTransitionTime":"2025-11-21T13:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.408569 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.408626 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.408637 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.408654 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.408665 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:06Z","lastTransitionTime":"2025-11-21T13:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.423362 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.428400 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.428468 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.428485 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.428511 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.428533 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:06Z","lastTransitionTime":"2025-11-21T13:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.444968 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.450602 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.450650 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.450659 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.450681 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.450693 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:06Z","lastTransitionTime":"2025-11-21T13:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.455189 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovnkube-controller/1.log" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.460292 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerStarted","Data":"1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b"} Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.460815 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.467315 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.472259 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.472314 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.472332 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.472352 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.472710 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:06Z","lastTransitionTime":"2025-11-21T13:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.477633 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.491490 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.491554 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.491573 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.491602 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.491620 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:06Z","lastTransitionTime":"2025-11-21T13:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.495104 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.506522 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.506688 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.508960 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.508999 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.509012 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.509030 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.509043 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:06Z","lastTransitionTime":"2025-11-21T13:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.513823 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.535458 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6363bd3bda49acccd90227648d0b7b6a5b6a3b133e159939a5cd49d6b44132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.1
1\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"star
tedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\
\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.550665 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.564489 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recur
siveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.579436 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\
\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.594627 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.609200 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4b30e0e-0193-4468-8697-d77352d6b1c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd8a7ea0eb3f0e2449376f55f6277cbf250e1fe9b12334ae10863a67403e1964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24fa51f85d2dad25dafaa1bc01b6fa4d04e38200dceb5a57f28b5fcac97dc2e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31bd83ed26e48907b284aba52462ec0e17a41af0ab24b310046c41d73234689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.611179 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.611210 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.611222 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.611240 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 
13:37:06.611253 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:06Z","lastTransitionTime":"2025-11-21T13:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.625842 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.639830 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.653804 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.670976 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.682501 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.693667 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.693932 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:37:38.693916552 +0000 UTC m=+96.671807830 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.695495 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z"
Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.713954 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.713994 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.714005 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.714022 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.714034 4721 setters.go:603] "Node
became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:06Z","lastTransitionTime":"2025-11-21T13:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.720510 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-scri
pt-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\
"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccefa2fffb83a7e5ce126b21a398d3d83b9e85b93cbd6379be3e3e95303f2400\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:36:53Z\\\",\\\"message\\\":\\\"rk=default : 1.252052ms\\\\nI1121 13:36:53.324221 6274 services_controller.go:356] Processing sync for service openshift-operator-lifecycle-manager/package-server-manager-metrics for network=default\\\\nI1121 13:36:53.324140 6274 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1121 13:36:53.324284 6274 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]} options:{GoMap:map[iface-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:36:53.324322 6274 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1121 
13:36:53.324338 6274 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:37:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\
\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.731570 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:06Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.794611 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.794656 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.794676 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.794695 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: 
\"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.794762 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.794812 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.794827 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.794837 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.794850 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 13:37:38.794828973 +0000 UTC m=+96.772720231 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.794877 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-21 13:37:38.794865084 +0000 UTC m=+96.772756342 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.794916 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.794924 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.794930 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.794948 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-21 13:37:38.794942396 +0000 UTC m=+96.772833654 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.795002 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.795021 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 13:37:38.795016028 +0000 UTC m=+96.772907286 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.817079 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.817124 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.817133 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.817149 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.817159 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:06Z","lastTransitionTime":"2025-11-21T13:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.920459 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.920516 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.920527 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.920541 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.920551 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:06Z","lastTransitionTime":"2025-11-21T13:37:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.981673 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.981723 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:06 crc kubenswrapper[4721]: I1121 13:37:06.981697 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.981846 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.981970 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:06 crc kubenswrapper[4721]: E1121 13:37:06.982122 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.026118 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.026501 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.026646 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.026788 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.026964 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:07Z","lastTransitionTime":"2025-11-21T13:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.130330 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.130371 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.130381 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.130396 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.130406 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:07Z","lastTransitionTime":"2025-11-21T13:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.232997 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.233042 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.233055 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.233089 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.233100 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:07Z","lastTransitionTime":"2025-11-21T13:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.335807 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.335845 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.335856 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.335874 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.335885 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:07Z","lastTransitionTime":"2025-11-21T13:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.439762 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.439812 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.439824 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.439843 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.439855 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:07Z","lastTransitionTime":"2025-11-21T13:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.468358 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovnkube-controller/2.log" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.469110 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovnkube-controller/1.log" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.473313 4721 generic.go:334] "Generic (PLEG): container finished" podID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerID="1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b" exitCode=1 Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.473357 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerDied","Data":"1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b"} Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.473456 4721 scope.go:117] "RemoveContainer" containerID="ccefa2fffb83a7e5ce126b21a398d3d83b9e85b93cbd6379be3e3e95303f2400" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.479911 4721 scope.go:117] "RemoveContainer" containerID="1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b" Nov 21 13:37:07 crc kubenswrapper[4721]: E1121 13:37:07.480393 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-x6mh9_openshift-ovn-kubernetes(7db08ddd-c14b-40b0-a2fd-67f84d6e747a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.508834 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ccefa2fffb83a7e5ce126b21a398d3d83b9e85b93cbd6379be3e3e95303f2400\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:36:53Z\\\",\\\"message\\\":\\\"rk=default : 1.252052ms\\\\nI1121 13:36:53.324221 6274 services_controller.go:356] Processing sync for service openshift-operator-lifecycle-manager/package-server-manager-metrics for network=default\\\\nI1121 13:36:53.324140 6274 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1121 13:36:53.324284 6274 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]} options:{GoMap:map[iface-id-ver:9d751cbb-f2e2-430d-9754-c882a5e924a5 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:3b 10.217.0.59]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {960d98b2-dc64-4e93-a4b6-9b19847af71e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:36:53.324322 6274 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1121 13:36:53.324338 6274 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:52Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"e-plugin_TCP_cluster 
options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.246:9443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ab0b1d51-5ec6-479b-8881-93dfa8d30337}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:37:06.827568 6452 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-console/networking-console-plugin]} name:Service_openshift-network-console/networking-console-plugin_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.246:9443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ab0b1d51-5ec6-479b-8881-93dfa8d30337}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:37:06.827807 6452 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1121 13:37:06.827895 6452 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:37:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly
\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:07Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.525834 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:07Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.544958 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.545115 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.545139 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.545206 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.545259 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:07Z","lastTransitionTime":"2025-11-21T13:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.551438 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:07Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.581927 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:07Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.606003 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:07Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.631878 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6363bd3bda49acccd90227648d0b7b6a5b6a3b133e159939a5cd49d6b44132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-11-21T13:37:07Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.649361 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:07Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.649778 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.649836 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.649855 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.649880 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 
13:37:07.649900 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:07Z","lastTransitionTime":"2025-11-21T13:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.667790 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:07Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.687761 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:07Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.707539 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4b30e0e-0193-4468-8697-d77352d6b1c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd8a7ea0eb3f0e2449376f55f6277cbf250e1fe9b12334ae10863a67403e1964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24fa51f85d2dad25dafaa1bc01b6fa4d04e38200dceb5a57f28b5fcac97dc2e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31bd83ed26e48907b284aba52462ec0e17a41af0ab24b310046c41d73234689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:07Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.726897 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:07Z is after 2025-08-24T17:21:41Z" Nov 21 
13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.744666 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:07Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.752976 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.753093 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.753108 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.753128 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.753141 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:07Z","lastTransitionTime":"2025-11-21T13:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.764808 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:07Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.784012 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\
"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:07Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.803436 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kub
e-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:07Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.819604 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:07Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.831852 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:07Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.855942 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.855990 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.855999 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.856014 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.856024 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:07Z","lastTransitionTime":"2025-11-21T13:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.960042 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.960101 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.960111 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.960129 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.960141 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:07Z","lastTransitionTime":"2025-11-21T13:37:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:07 crc kubenswrapper[4721]: I1121 13:37:07.981511 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:07 crc kubenswrapper[4721]: E1121 13:37:07.981637 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.062729 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.062766 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.062777 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.062793 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.062804 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:08Z","lastTransitionTime":"2025-11-21T13:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.165540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.165934 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.165945 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.165963 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.165978 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:08Z","lastTransitionTime":"2025-11-21T13:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.268454 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.268508 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.268521 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.268540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.268553 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:08Z","lastTransitionTime":"2025-11-21T13:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.372177 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.372265 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.372287 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.372318 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.372341 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:08Z","lastTransitionTime":"2025-11-21T13:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.475524 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.475557 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.475566 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.475581 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.475590 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:08Z","lastTransitionTime":"2025-11-21T13:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.479992 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovnkube-controller/2.log" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.485740 4721 scope.go:117] "RemoveContainer" containerID="1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b" Nov 21 13:37:08 crc kubenswrapper[4721]: E1121 13:37:08.486028 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-x6mh9_openshift-ovn-kubernetes(7db08ddd-c14b-40b0-a2fd-67f84d6e747a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.503172 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:08Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.529694 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:08Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.551937 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:08Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.575020 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2a38d97633cabc204cdbaa666190c699d4409a
9645f3794de4c8bb3f7e7c4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"e-plugin_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.246:9443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ab0b1d51-5ec6-479b-8881-93dfa8d30337}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:37:06.827568 6452 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-console/networking-console-plugin]} name:Service_openshift-network-console/networking-console-plugin_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.246:9443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ab0b1d51-5ec6-479b-8881-93dfa8d30337}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:37:06.827807 6452 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1121 13:37:06.827895 6452 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:37:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-x6mh9_openshift-ovn-kubernetes(7db08ddd-c14b-40b0-a2fd-67f84d6e747a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:08Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.580151 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.580226 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.580247 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.580281 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.580304 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:08Z","lastTransitionTime":"2025-11-21T13:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.592989 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:08Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.616968 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6363bd3bda49acccd90227648d0b7b6a5b6a3b133e159939a5cd49d6b44132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:08Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.635962 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:08Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.652707 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:08Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.669518 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:08Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.683514 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.683549 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.683559 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.683577 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.683590 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:08Z","lastTransitionTime":"2025-11-21T13:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.685798 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:08Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.701210 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:08Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.716032 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:08Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.731977 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:08Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.745811 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:08Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.763042 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:08Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.777136 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:08Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.786239 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.786284 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.786297 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.786314 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.786326 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:08Z","lastTransitionTime":"2025-11-21T13:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.790636 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4b30e0e-0193-4468-8697-d77352d6b1c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd8a7ea0eb3f0e2449376f55f6277cbf250e1fe9b12334ae10863a67403e1964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24fa51f85d2dad25dafaa1bc01b6fa4d04e38200dceb5a57f28b5fcac97dc2e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31bd83ed26e48907b284aba52462ec0e17a41af0ab24b310046c41d73234689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:08Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.889708 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.889760 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.889775 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.889796 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.889811 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:08Z","lastTransitionTime":"2025-11-21T13:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.980846 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.980878 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.980950 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:08 crc kubenswrapper[4721]: E1121 13:37:08.980975 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:08 crc kubenswrapper[4721]: E1121 13:37:08.981111 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:08 crc kubenswrapper[4721]: E1121 13:37:08.981216 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.992320 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.992357 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.992371 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.992386 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:08 crc kubenswrapper[4721]: I1121 13:37:08.992395 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:08Z","lastTransitionTime":"2025-11-21T13:37:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Nov 21 13:37:09 crc kubenswrapper[4721]: I1121 13:37:09.095640 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:09 crc kubenswrapper[4721]: I1121 13:37:09.095691 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:09 crc kubenswrapper[4721]: I1121 13:37:09.095702 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:09 crc kubenswrapper[4721]: I1121 13:37:09.095720 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:09 crc kubenswrapper[4721]: I1121 13:37:09.095730 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:09Z","lastTransitionTime":"2025-11-21T13:37:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:09 crc kubenswrapper[4721]: I1121 13:37:09.980878 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv"
Nov 21 13:37:09 crc kubenswrapper[4721]: E1121 13:37:09.981085 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6"
Nov 21 13:37:10 crc kubenswrapper[4721]: I1121 13:37:10.028826 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:10 crc kubenswrapper[4721]: I1121 13:37:10.028884 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:10 crc kubenswrapper[4721]: I1121 13:37:10.028905 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:10 crc kubenswrapper[4721]: I1121 13:37:10.028932 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:10 crc kubenswrapper[4721]: I1121 13:37:10.028953 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:10Z","lastTransitionTime":"2025-11-21T13:37:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:10 crc kubenswrapper[4721]: I1121 13:37:10.981423 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 13:37:10 crc kubenswrapper[4721]: I1121 13:37:10.981483 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 13:37:10 crc kubenswrapper[4721]: E1121 13:37:10.981543 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 13:37:10 crc kubenswrapper[4721]: I1121 13:37:10.981498 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 13:37:10 crc kubenswrapper[4721]: E1121 13:37:10.981665 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 13:37:10 crc kubenswrapper[4721]: E1121 13:37:10.981766 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 13:37:11 crc kubenswrapper[4721]: I1121 13:37:11.061340 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:11 crc kubenswrapper[4721]: I1121 13:37:11.061393 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:11 crc kubenswrapper[4721]: I1121 13:37:11.061404 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:11 crc kubenswrapper[4721]: I1121 13:37:11.061429 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:11 crc kubenswrapper[4721]: I1121 13:37:11.061443 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:11Z","lastTransitionTime":"2025-11-21T13:37:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:11 crc kubenswrapper[4721]: I1121 13:37:11.981503 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv"
Nov 21 13:37:11 crc kubenswrapper[4721]: E1121 13:37:11.982091 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6"
Nov 21 13:37:12 crc kubenswrapper[4721]: I1121 13:37:12.091445 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:12 crc kubenswrapper[4721]: I1121 13:37:12.091490 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:12 crc kubenswrapper[4721]: I1121 13:37:12.091503 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:12 crc kubenswrapper[4721]: I1121 13:37:12.091519 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:12 crc kubenswrapper[4721]: I1121 13:37:12.091530 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:12Z","lastTransitionTime":"2025-11-21T13:37:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:12 crc kubenswrapper[4721]: I1121 13:37:12.981158 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 13:37:12 crc kubenswrapper[4721]: I1121 13:37:12.981207 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 13:37:12 crc kubenswrapper[4721]: E1121 13:37:12.981347 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 13:37:12 crc kubenswrapper[4721]: I1121 13:37:12.981652 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 13:37:12 crc kubenswrapper[4721]: E1121 13:37:12.981758 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 13:37:12 crc kubenswrapper[4721]: E1121 13:37:12.982045 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.000468 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:12Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.016299 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:13Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.020571 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.020627 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.020644 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.020669 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.020686 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:13Z","lastTransitionTime":"2025-11-21T13:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.030236 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4b30e0e-0193-4468-8697-d77352d6b1c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd8a7ea0eb3f0e2449376f55f6277cbf250e1fe9b12334ae10863a67403e1964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24fa51f85d2dad25dafaa1bc01b6fa4d04e38200dceb5a57f28b5fcac97dc2e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31bd83ed26e48907b284aba52462ec0e17a41af0ab24b310046c41d73234689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:13Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.044497 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:13Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.058974 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:13Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.071893 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:13Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.085331 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:13Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.101105 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:13Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.114960 4721 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:13Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.125318 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.125370 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.125381 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.125402 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.125416 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:13Z","lastTransitionTime":"2025-11-21T13:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.130386 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:13Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.155504 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"e-plugin_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.246:9443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ab0b1d51-5ec6-479b-8881-93dfa8d30337}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:37:06.827568 6452 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-console/networking-console-plugin]} name:Service_openshift-network-console/networking-console-plugin_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.246:9443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ab0b1d51-5ec6-479b-8881-93dfa8d30337}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:37:06.827807 6452 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1121 13:37:06.827895 6452 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:37:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-x6mh9_openshift-ovn-kubernetes(7db08ddd-c14b-40b0-a2fd-67f84d6e747a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:13Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.170102 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:13Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.183450 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:13Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.199863 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:13Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.214925 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:13Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.227878 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.227937 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.227951 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.227968 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.227978 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:13Z","lastTransitionTime":"2025-11-21T13:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.233582 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6363bd3bda49acccd90227648d0b7b6a5b6a3b133e159939a5cd49d6b44132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:13Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.246789 4721 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:13Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.330169 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.330222 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.330236 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.330257 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.330275 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:13Z","lastTransitionTime":"2025-11-21T13:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.433412 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.433475 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.433489 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.433517 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.433532 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:13Z","lastTransitionTime":"2025-11-21T13:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.535831 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.535908 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.535916 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.535929 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.535938 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:13Z","lastTransitionTime":"2025-11-21T13:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.638118 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.638172 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.638188 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.638207 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.638221 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:13Z","lastTransitionTime":"2025-11-21T13:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.746588 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.746780 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.746791 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.747243 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.747268 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:13Z","lastTransitionTime":"2025-11-21T13:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.851040 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.851113 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.851127 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.851148 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.851160 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:13Z","lastTransitionTime":"2025-11-21T13:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.954482 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.954540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.954552 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.954576 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.954591 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:13Z","lastTransitionTime":"2025-11-21T13:37:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:13 crc kubenswrapper[4721]: I1121 13:37:13.981088 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:13 crc kubenswrapper[4721]: E1121 13:37:13.981295 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.058783 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.058869 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.058881 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.058904 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.058917 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:14Z","lastTransitionTime":"2025-11-21T13:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.162586 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.162642 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.162659 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.162678 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.162694 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:14Z","lastTransitionTime":"2025-11-21T13:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.265835 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.265872 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.265881 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.265894 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.265903 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:14Z","lastTransitionTime":"2025-11-21T13:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.369187 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.369254 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.369277 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.369306 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.369328 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:14Z","lastTransitionTime":"2025-11-21T13:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.471964 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.472091 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.472114 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.472145 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.472329 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:14Z","lastTransitionTime":"2025-11-21T13:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.576296 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.576371 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.576393 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.576425 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.576448 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:14Z","lastTransitionTime":"2025-11-21T13:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.679058 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.679242 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.679262 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.679282 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.679297 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:14Z","lastTransitionTime":"2025-11-21T13:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.783615 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.783688 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.783705 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.783732 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.783751 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:14Z","lastTransitionTime":"2025-11-21T13:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.886834 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.886872 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.886883 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.886902 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.886917 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:14Z","lastTransitionTime":"2025-11-21T13:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.981130 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.981211 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:14 crc kubenswrapper[4721]: E1121 13:37:14.981294 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.981140 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:14 crc kubenswrapper[4721]: E1121 13:37:14.981408 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:14 crc kubenswrapper[4721]: E1121 13:37:14.981589 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.989310 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.989362 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.989385 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.989414 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:14 crc kubenswrapper[4721]: I1121 13:37:14.989437 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:14Z","lastTransitionTime":"2025-11-21T13:37:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.092247 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.092318 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.092339 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.092364 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.092383 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:15Z","lastTransitionTime":"2025-11-21T13:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.195636 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.195726 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.195746 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.195779 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.195799 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:15Z","lastTransitionTime":"2025-11-21T13:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.299548 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.299591 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.299602 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.299620 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.299660 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:15Z","lastTransitionTime":"2025-11-21T13:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.402782 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.402825 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.402834 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.402852 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.402864 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:15Z","lastTransitionTime":"2025-11-21T13:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.506456 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.506510 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.506521 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.506542 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.506557 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:15Z","lastTransitionTime":"2025-11-21T13:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.610128 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.610202 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.610221 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.610254 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.610276 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:15Z","lastTransitionTime":"2025-11-21T13:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.713210 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.713251 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.713261 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.713277 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.713288 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:15Z","lastTransitionTime":"2025-11-21T13:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.816506 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.816580 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.816594 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.816616 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.816630 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:15Z","lastTransitionTime":"2025-11-21T13:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.919768 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.919878 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.919898 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.919930 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.919950 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:15Z","lastTransitionTime":"2025-11-21T13:37:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:15 crc kubenswrapper[4721]: I1121 13:37:15.981472 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:15 crc kubenswrapper[4721]: E1121 13:37:15.981640 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.022149 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.022243 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.022296 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.022320 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.022338 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:16Z","lastTransitionTime":"2025-11-21T13:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.124696 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.124767 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.124781 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.124808 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.124874 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:16Z","lastTransitionTime":"2025-11-21T13:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.228022 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.228141 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.228159 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.228187 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.228213 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:16Z","lastTransitionTime":"2025-11-21T13:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.331866 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.332592 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.332796 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.333041 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.333229 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:16Z","lastTransitionTime":"2025-11-21T13:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.436955 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.437046 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.437105 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.437138 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.437156 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:16Z","lastTransitionTime":"2025-11-21T13:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.539993 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.540047 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.540072 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.540088 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.540110 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:16Z","lastTransitionTime":"2025-11-21T13:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.595579 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.595618 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.595629 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.595643 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.595652 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:16Z","lastTransitionTime":"2025-11-21T13:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:16 crc kubenswrapper[4721]: E1121 13:37:16.614448 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:16Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.619266 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.619298 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.619307 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.619328 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.619337 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:16Z","lastTransitionTime":"2025-11-21T13:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:16 crc kubenswrapper[4721]: E1121 13:37:16.641721 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:16Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.647460 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.647506 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
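The status patch at 13:37:16.614448 above (retried at 13:37:16.736283 below) is rejected because the API server must call the node.network-node-identity.openshift.io admission webhook at https://127.0.0.1:9743 before admitting the patch, and that webhook's serving certificate expired on 2025-08-24T17:21:41Z, well before the node's current clock of 2025-11-21. A minimal Go sketch, under those assumptions, of reproducing the same verdict by dialing the endpoint from the log and comparing the certificate's NotAfter to the current time (the insecure dial exists only so an expired certificate can be inspected at all):

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Dial the webhook endpoint named in the log. InsecureSkipVerify is
	// deliberate: a verifying client (like the API server above) aborts the
	// handshake on an expired certificate, so verification is skipped here
	// purely to retrieve the certificate for inspection.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	leaf := conn.ConnectionState().PeerCertificates[0]
	if now := time.Now().UTC(); now.After(leaf.NotAfter) {
		// Same condition the failed handshake reported:
		// "current time ... is after 2025-08-24T17:21:41Z"
		fmt.Printf("x509: certificate has expired: current time %s is after %s\n",
			now.Format(time.RFC3339), leaf.NotAfter.UTC().Format(time.RFC3339))
	}
}

Until that certificate is rotated, every node-status patch fails identically, which is why the kubelet keeps logging will-retry errors while the Ready condition stays False.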
event="NodeHasNoDiskPressure" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.647522 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.647546 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.647564 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:16Z","lastTransitionTime":"2025-11-21T13:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:16 crc kubenswrapper[4721]: E1121 13:37:16.689736 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:16Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.694292 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.694349 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
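Separately, the Ready=False condition itself comes from the runtime network check: the kubelet reports NetworkReady=false as long as no CNI network configuration exists in /etc/kubernetes/cni/net.d/. A simplified sketch of that directory probe follows; the extension filter mirrors libcni's usual .conf/.conflist/.json defaults and is an assumption for illustration, not the kubelet's exact code path:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cniConfigPresent reports whether any CNI network config exists in dir.
// The extension filter (.conf, .conflist, .json) is assumed from libcni's
// common defaults.
func cniConfigPresent(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	// Directory path taken from the log messages above.
	ok, err := cniConfigPresent("/etc/kubernetes/cni/net.d")
	if err != nil || !ok {
		fmt.Println("container runtime network not ready: NetworkReady=false (no CNI configuration file)")
		return
	}
	fmt.Println("NetworkReady=true")
}

The NotReady events above keep repeating until a network provider writes a config file into that directory, which matches the log's own hint: "Has your network provider started?"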
event="NodeHasNoDiskPressure" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.694364 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.694387 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.694403 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:16Z","lastTransitionTime":"2025-11-21T13:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:16 crc kubenswrapper[4721]: E1121 13:37:16.719233 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:16Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.723430 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.723473 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.723484 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.723506 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.723517 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:16Z","lastTransitionTime":"2025-11-21T13:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:16 crc kubenswrapper[4721]: E1121 13:37:16.736283 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:16Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:16 crc kubenswrapper[4721]: E1121 13:37:16.736396 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.738036 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.738055 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.738082 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.738094 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.738103 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:16Z","lastTransitionTime":"2025-11-21T13:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.840317 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.840402 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.840417 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.840440 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.840455 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:16Z","lastTransitionTime":"2025-11-21T13:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.944208 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.944298 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.944357 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.944393 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.944418 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:16Z","lastTransitionTime":"2025-11-21T13:37:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.981222 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.981286 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 13:37:16 crc kubenswrapper[4721]: I1121 13:37:16.981324 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 13:37:16 crc kubenswrapper[4721]: E1121 13:37:16.981456 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 13:37:16 crc kubenswrapper[4721]: E1121 13:37:16.981609 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 13:37:16 crc kubenswrapper[4721]: E1121 13:37:16.981834 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 13:37:17 crc kubenswrapper[4721]: I1121 13:37:17.048202 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:17 crc kubenswrapper[4721]: I1121 13:37:17.048262 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:17 crc kubenswrapper[4721]: I1121 13:37:17.048276 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:17 crc kubenswrapper[4721]: I1121 13:37:17.048296 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:17 crc kubenswrapper[4721]: I1121 13:37:17.048311 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:17Z","lastTransitionTime":"2025-11-21T13:37:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:17 crc kubenswrapper[4721]: I1121 13:37:17.980920 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:17 crc kubenswrapper[4721]: E1121 13:37:17.981258 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:18 crc kubenswrapper[4721]: I1121 13:37:18.080307 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:18 crc kubenswrapper[4721]: I1121 13:37:18.080370 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:18 crc kubenswrapper[4721]: I1121 13:37:18.080385 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:18 crc kubenswrapper[4721]: I1121 13:37:18.080404 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:18 crc kubenswrapper[4721]: I1121 13:37:18.080418 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:18Z","lastTransitionTime":"2025-11-21T13:37:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:18 crc kubenswrapper[4721]: I1121 13:37:18.182561 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:18 crc kubenswrapper[4721]: I1121 13:37:18.182663 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:18 crc kubenswrapper[4721]: I1121 13:37:18.182683 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:18 crc kubenswrapper[4721]: I1121 13:37:18.182724 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:18 crc kubenswrapper[4721]: I1121 13:37:18.182764 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:18Z","lastTransitionTime":"2025-11-21T13:37:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:18 crc kubenswrapper[4721]: I1121 13:37:18.907780 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:18 crc kubenswrapper[4721]: I1121 13:37:18.907841 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:18 crc kubenswrapper[4721]: I1121 13:37:18.907854 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:18 crc kubenswrapper[4721]: I1121 13:37:18.907875 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:18 crc kubenswrapper[4721]: I1121 13:37:18.907888 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:18Z","lastTransitionTime":"2025-11-21T13:37:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:18 crc kubenswrapper[4721]: I1121 13:37:18.981264 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:18 crc kubenswrapper[4721]: I1121 13:37:18.981290 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:18 crc kubenswrapper[4721]: E1121 13:37:18.981424 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:18 crc kubenswrapper[4721]: I1121 13:37:18.981522 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:18 crc kubenswrapper[4721]: E1121 13:37:18.981622 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:18 crc kubenswrapper[4721]: E1121 13:37:18.981839 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.011105 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.011159 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.011173 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.011198 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.011215 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:19Z","lastTransitionTime":"2025-11-21T13:37:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.115434 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.115517 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.115537 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.115566 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.115585 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:19Z","lastTransitionTime":"2025-11-21T13:37:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.747907 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs\") pod \"network-metrics-daemon-sr6gv\" (UID: \"e613396d-3bd5-4a6f-8f9a-507f40954ee6\") " pod="openshift-multus/network-metrics-daemon-sr6gv"
Nov 21 13:37:19 crc kubenswrapper[4721]: E1121 13:37:19.748164 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Nov 21 13:37:19 crc kubenswrapper[4721]: E1121 13:37:19.748276 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs podName:e613396d-3bd5-4a6f-8f9a-507f40954ee6 nodeName:}" failed. No retries permitted until 2025-11-21 13:37:51.748253383 +0000 UTC m=+109.726144731 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs") pod "network-metrics-daemon-sr6gv" (UID: "e613396d-3bd5-4a6f-8f9a-507f40954ee6") : object "openshift-multus"/"metrics-daemon-secret" not registered
Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.839466 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.839511 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.839523 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.839540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.839552 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:19Z","lastTransitionTime":"2025-11-21T13:37:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.944038 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.944117 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.944133 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.944164 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.944347 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:19Z","lastTransitionTime":"2025-11-21T13:37:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:19 crc kubenswrapper[4721]: I1121 13:37:19.980757 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:19 crc kubenswrapper[4721]: E1121 13:37:19.980932 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:20 crc kubenswrapper[4721]: I1121 13:37:20.048189 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:20 crc kubenswrapper[4721]: I1121 13:37:20.048277 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:20 crc kubenswrapper[4721]: I1121 13:37:20.048297 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:20 crc kubenswrapper[4721]: I1121 13:37:20.048325 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:20 crc kubenswrapper[4721]: I1121 13:37:20.048346 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:20Z","lastTransitionTime":"2025-11-21T13:37:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Nov 21 13:37:20 crc kubenswrapper[4721]: I1121 13:37:20.980715 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 13:37:20 crc kubenswrapper[4721]: E1121 13:37:20.980881 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 13:37:20 crc kubenswrapper[4721]: I1121 13:37:20.980967 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 13:37:20 crc kubenswrapper[4721]: E1121 13:37:20.981049 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 13:37:20 crc kubenswrapper[4721]: I1121 13:37:20.980709 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 13:37:20 crc kubenswrapper[4721]: E1121 13:37:20.981223 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 13:37:20 crc kubenswrapper[4721]: I1121 13:37:20.982593 4721 scope.go:117] "RemoveContainer" containerID="1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b"
Nov 21 13:37:20 crc kubenswrapper[4721]: E1121 13:37:20.982820 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-x6mh9_openshift-ovn-kubernetes(7db08ddd-c14b-40b0-a2fd-67f84d6e747a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a"
Nov 21 13:37:21 crc kubenswrapper[4721]: I1121 13:37:21.981113 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv"
Nov 21 13:37:21 crc kubenswrapper[4721]: E1121 13:37:21.981302 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6"
Nov 21 13:37:22 crc kubenswrapper[4721]: I1121 13:37:22.981163 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 13:37:22 crc kubenswrapper[4721]: I1121 13:37:22.981229 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 13:37:22 crc kubenswrapper[4721]: E1121 13:37:22.981298 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 13:37:22 crc kubenswrapper[4721]: E1121 13:37:22.981405 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 13:37:22 crc kubenswrapper[4721]: I1121 13:37:22.981796 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 13:37:22 crc kubenswrapper[4721]: E1121 13:37:22.982005 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.007374 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:23Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.024026 4721 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:23Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.038924 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:23Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.052690 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.052749 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.052767 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.052792 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.052813 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:23Z","lastTransitionTime":"2025-11-21T13:37:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.055503 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:23Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.080804 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"e-plugin_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.246:9443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ab0b1d51-5ec6-479b-8881-93dfa8d30337}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:37:06.827568 6452 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-console/networking-console-plugin]} name:Service_openshift-network-console/networking-console-plugin_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.246:9443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ab0b1d51-5ec6-479b-8881-93dfa8d30337}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:37:06.827807 6452 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1121 13:37:06.827895 6452 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:37:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-x6mh9_openshift-ovn-kubernetes(7db08ddd-c14b-40b0-a2fd-67f84d6e747a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:23Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.103995 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:23Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.125651 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6363bd3bda49acccd90227648d0b7b6a5b6a3b133e159939a5cd49d6b44132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:23Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.137349 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:23Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.155442 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:23Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.155693 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.155727 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.155735 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.155753 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.155763 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:23Z","lastTransitionTime":"2025-11-21T13:37:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.172824 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:23Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.186730 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4b30e0e-0193-4468-8697-d77352d6b1c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd8a7ea0eb3f0e2449376f55f6277cbf250e1fe9b12334ae10863a67403e1964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24fa51f85d2dad25dafaa1bc01b6fa4d04e38200dceb5a57f28b5fcac97dc2e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31bd83ed26e48907b284aba52462ec0e17a41af0ab24b310046c41d73234689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:23Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.200663 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:23Z is after 2025-08-24T17:21:41Z" Nov 21 
13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.220522 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:23Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.238719 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:23Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.258879 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:23Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.260466 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.260536 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.260553 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.260575 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.260589 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:23Z","lastTransitionTime":"2025-11-21T13:37:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.281474 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.
io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:23Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.302869 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:23Z is after 2025-08-24T17:21:41Z"
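[Note: both status patches above fail for the same reason: the serving certificate of the pod.network-node-identity.openshift.io webhook at 127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, while the node's clock reads 2025-11-21. A minimal Go sketch for confirming the certificate window from the node follows; the address is taken from the webhook error above, and InsecureSkipVerify is needed precisely because an expired certificate would otherwise abort the handshake.]

package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Webhook endpoint as reported in the kubelet error above.
	addr := "127.0.0.1:9743"

	// Skip verification so an already-expired cert can still be inspected.
	conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("dial %s: %v", addr, err)
	}
	defer conn.Close()

	now := time.Now()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%s notBefore=%s notAfter=%s expired=%v\n",
			cert.Subject,
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339),
			now.After(cert.NotAfter))
	}
}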
Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.363202 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.363264 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.363282 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.363309 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.363323 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:23Z","lastTransitionTime":"2025-11-21T13:37:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[The same five-entry node-status block repeats with identical content at 13:37:23.467, 13:37:23.571, 13:37:23.674, 13:37:23.777 and 13:37:23.880; repeats omitted.]
Nov 21 13:37:23 crc kubenswrapper[4721]: I1121 13:37:23.981238 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv"
Nov 21 13:37:23 crc kubenswrapper[4721]: E1121 13:37:23.981437 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6"
[Five-entry node-status blocks continue at 13:37:23.983 and roughly every 100 ms from 13:37:24.086 through 13:37:24.915; repeats omitted.]
Nov 21 13:37:24 crc kubenswrapper[4721]: I1121 13:37:24.981582 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 13:37:24 crc kubenswrapper[4721]: I1121 13:37:24.981651 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 13:37:24 crc kubenswrapper[4721]: E1121 13:37:24.981772 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 13:37:24 crc kubenswrapper[4721]: I1121 13:37:24.981816 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
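[Note: every NotReady heartbeat and every "No sandbox for pod can be found" entry traces back to one condition: the runtime finds no CNI configuration in /etc/kubernetes/cni/net.d/, so no pod sandboxes can be created until the network operator writes one. A rough standalone sketch of what that readiness test amounts to follows; the kubelet's network plugin actually watches the directory through its CNI library, and the directory path and the loadable extensions are assumptions based on the log and libcni's conventions.]

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Printf("NetworkReady=false: %v\n", err)
		return
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions libcni loads
			fmt.Printf("NetworkReady=true: found %s\n", e.Name())
			return
		}
	}
	fmt.Println("NetworkReady=false: no CNI configuration file in", confDir)
}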
Nov 21 13:37:24 crc kubenswrapper[4721]: E1121 13:37:24.981997 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 13:37:24 crc kubenswrapper[4721]: E1121 13:37:24.982231 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
[Five-entry node-status blocks continue roughly every 100 ms from 13:37:25.019 through 13:37:25.954; repeats omitted.]
Nov 21 13:37:25 crc kubenswrapper[4721]: I1121 13:37:25.981346 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv"
Nov 21 13:37:25 crc kubenswrapper[4721]: E1121 13:37:25.981535 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6"
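[Note: the condition object the kubelet logs on every "Node became not ready" line is the same structure it tries to patch into the Node's .status.conditions. A small self-contained Go sketch that decodes one of the condition payloads from this log and evaluates readiness follows; the struct mirrors the fields visible in the JSON above and is an illustration, not the kubelet's own types.]

package main

import (
	"encoding/json"
	"fmt"
	"log"
)

// NodeCondition mirrors the fields printed by setters.go:603 above.
type NodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// Payload copied verbatim from one of the log entries above.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:25Z","lastTransitionTime":"2025-11-21T13:37:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`

	var c NodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		log.Fatal(err)
	}
	ready := c.Type == "Ready" && c.Status == "True"
	fmt.Printf("node ready: %v (reason=%s)\n", ready, c.Reason)
}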
pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.057662 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.057716 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.057738 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.057764 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.057782 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:26Z","lastTransitionTime":"2025-11-21T13:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.161645 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.161748 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.161779 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.161817 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.161839 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:26Z","lastTransitionTime":"2025-11-21T13:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.264840 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.264902 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.264921 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.264949 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.264970 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:26Z","lastTransitionTime":"2025-11-21T13:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.367445 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.367520 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.367540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.367566 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.367589 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:26Z","lastTransitionTime":"2025-11-21T13:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.471238 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.471305 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.471323 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.471352 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.471370 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:26Z","lastTransitionTime":"2025-11-21T13:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.574783 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.574850 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.574867 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.574896 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.574917 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:26Z","lastTransitionTime":"2025-11-21T13:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.678763 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.678828 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.678849 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.678871 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.678886 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:26Z","lastTransitionTime":"2025-11-21T13:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.782853 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.782932 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.782951 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.782975 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.782988 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:26Z","lastTransitionTime":"2025-11-21T13:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.886326 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.886404 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.886422 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.886451 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.886469 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:26Z","lastTransitionTime":"2025-11-21T13:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.888395 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.888465 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.888482 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.888515 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.888541 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:26Z","lastTransitionTime":"2025-11-21T13:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
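[Note: the node-status patch that follows carries the node's capacity and allocatable figures; the gap between them is the headroom the kubelet withholds for system and eviction reserves. A back-of-the-envelope Go check using the numbers from the patch below follows; it is arithmetic on the logged values, not a statement about how this CRC instance is configured. Ephemeral storage is skipped because the patch reports its capacity in Ki but its allocatable value in bytes, so comparing them needs a unit conversion first.]

package main

import "fmt"

func main() {
	// Figures from the status patch below: capacity cpu "12" vs
	// allocatable "11800m", memory "32865360Ki" vs "32404560Ki".
	capCPUMilli, allocCPUMilli := 12000, 11800
	capMemKi, allocMemKi := 32865360, 32404560

	fmt.Printf("reserved CPU: %dm\n", capCPUMilli-allocCPUMilli) // 200m
	reservedKi := capMemKi - allocMemKi
	fmt.Printf("reserved memory: %dKi (%.0fMi)\n",
		reservedKi, float64(reservedKi)/1024) // 460800Ki = 450Mi
}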
Nov 21 13:37:26 crc kubenswrapper[4721]: E1121 13:37:26.910586 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:26Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.916387 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.916445 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.916459 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.916484 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.916499 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:26Z","lastTransitionTime":"2025-11-21T13:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:26 crc kubenswrapper[4721]: E1121 13:37:26.936511 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:26Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.942693 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.942734 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
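The repeated patch failures above share one root cause: the certificate served by the node-identity webhook at https://127.0.0.1:9743 expired at 2025-08-24T17:21:41Z, months before the node's current time. A minimal sketch to confirm this from the node, assuming Python 3 and the third-party cryptography package (version 42 or newer) are available; the host and port come from the log, everything else is illustrative:

    import datetime
    import socket
    import ssl

    from cryptography import x509

    # Disable verification so the handshake succeeds even though the cert is expired.
    # check_hostname must be cleared before verify_mode can be set to CERT_NONE.
    ctx = ssl.create_default_context()
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE

    # Address of the node.network-node-identity.openshift.io webhook, per the log.
    with socket.create_connection(("127.0.0.1", 9743), timeout=5) as sock:
        with ctx.wrap_socket(sock, server_hostname="127.0.0.1") as tls:
            der = tls.getpeercert(binary_form=True)  # raw DER bytes, returned even with CERT_NONE

    cert = x509.load_der_x509_certificate(der)
    now = datetime.datetime.now(datetime.timezone.utc)
    print("subject:  ", cert.subject.rfc4514_string())
    print("notAfter: ", cert.not_valid_after_utc)
    print("expired:  ", now > cert.not_valid_after_utc)  # True for the failures above

Until that certificate is rotated, every node-status patch will be rejected by the webhook, which is why the kubelet keeps retrying below.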
event="NodeHasNoDiskPressure" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.942742 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.942757 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.942767 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:26Z","lastTransitionTime":"2025-11-21T13:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:26 crc kubenswrapper[4721]: E1121 13:37:26.960377 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:26Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.965729 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.965765 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.965833 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.965851 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.965861 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:26Z","lastTransitionTime":"2025-11-21T13:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.981466 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.981655 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.981775 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:26 crc kubenswrapper[4721]: E1121 13:37:26.981797 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:26 crc kubenswrapper[4721]: E1121 13:37:26.981947 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:26 crc kubenswrapper[4721]: E1121 13:37:26.982141 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:26 crc kubenswrapper[4721]: E1121 13:37:26.982497 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:26Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.988465 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.988496 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
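Alongside the webhook failures, the Ready condition keeps reporting that no CNI configuration exists in /etc/kubernetes/cni/net.d/, which also blocks the pod sandboxes above. A minimal sketch to inspect that directory on the node, assuming Python 3; the path is taken from the log message, and the file extensions are the conventional CNI ones:

    import json
    import pathlib

    # Directory the kubelet names in the NotReady condition above.
    cni_dir = pathlib.Path("/etc/kubernetes/cni/net.d")

    # CNI plugins conventionally drop *.conf, *.conflist, or *.json files here.
    confs = sorted(p for p in cni_dir.glob("*") if p.suffix in {".conf", ".conflist", ".json"})
    if not confs:
        print(f"no CNI config under {cni_dir}; the network plugin has not written one yet")
    for p in confs:
        try:
            data = json.loads(p.read_text())
            print(p.name, "->", data.get("name", "<unnamed>"), "cniVersion", data.get("cniVersion"))
        except (OSError, ValueError) as exc:
            print(p.name, "unreadable:", exc)

An empty directory here is consistent with the log: the network provider (OVN-Kubernetes on this cluster) cannot start while the node-identity webhook rejects node updates, so no CNI config gets written and the node stays NotReady.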
event="NodeHasNoDiskPressure" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.988507 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.988525 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:26 crc kubenswrapper[4721]: I1121 13:37:26.988537 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:26Z","lastTransitionTime":"2025-11-21T13:37:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:27 crc kubenswrapper[4721]: E1121 13:37:27.005703 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:27Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:27 crc kubenswrapper[4721]: E1121 13:37:27.005840 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.007927 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.007956 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.007967 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.007980 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.007990 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:27Z","lastTransitionTime":"2025-11-21T13:37:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.110982 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.111030 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.111041 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.111058 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.111115 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:27Z","lastTransitionTime":"2025-11-21T13:37:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.214050 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.214163 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.214188 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.214226 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.214252 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:27Z","lastTransitionTime":"2025-11-21T13:37:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.937591 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.937660 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.937687 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.937716 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.937738 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:27Z","lastTransitionTime":"2025-11-21T13:37:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:27 crc kubenswrapper[4721]: I1121 13:37:27.981034 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:27 crc kubenswrapper[4721]: E1121 13:37:27.981380 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.040760 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.040820 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.040837 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.040866 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.040885 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:28Z","lastTransitionTime":"2025-11-21T13:37:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.559822 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hp6ft_15670f4b-cd59-4040-b6d5-87a7cf51d63f/kube-multus/0.log" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.559905 4721 generic.go:334] "Generic (PLEG): container finished" podID="15670f4b-cd59-4040-b6d5-87a7cf51d63f" containerID="1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69" exitCode=1 Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.559957 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hp6ft" event={"ID":"15670f4b-cd59-4040-b6d5-87a7cf51d63f","Type":"ContainerDied","Data":"1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69"} Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.560623 4721 scope.go:117] "RemoveContainer" containerID="1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.581574 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:28Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.617182 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"e-plugin_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.246:9443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ab0b1d51-5ec6-479b-8881-93dfa8d30337}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:37:06.827568 6452 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-console/networking-console-plugin]} name:Service_openshift-network-console/networking-console-plugin_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.246:9443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ab0b1d51-5ec6-479b-8881-93dfa8d30337}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:37:06.827807 6452 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1121 13:37:06.827895 6452 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:37:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-x6mh9_openshift-ovn-kubernetes(7db08ddd-c14b-40b0-a2fd-67f84d6e747a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:28Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.630301 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:28Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.646639 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:28Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.660953 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.661016 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.661025 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.661040 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.661050 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:28Z","lastTransitionTime":"2025-11-21T13:37:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.666594 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:28Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.683581 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:37:28Z\\\",\\\"message\\\":\\\"2025-11-21T13:36:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7b84d057-aee8-45a5-8c6b-b6c8df46ea49\\\\n2025-11-21T13:36:42+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7b84d057-aee8-45a5-8c6b-b6c8df46ea49 to /host/opt/cni/bin/\\\\n2025-11-21T13:36:43Z [verbose] multus-daemon started\\\\n2025-11-21T13:36:43Z [verbose] Readiness Indicator file check\\\\n2025-11-21T13:37:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the 
condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:28Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.697906 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6363bd3bda49acccd90227648d0b7b6a5b6a3b133e159939a5cd49d6b44132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:28Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.709554 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:28Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.723594 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:28Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.737201 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:28Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.753823 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:28Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.763970 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.764035 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.764049 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.764085 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.764098 4721 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:28Z","lastTransitionTime":"2025-11-21T13:37:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.768598 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:28Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.785049 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4b30e0e-0193-4468-8697-d77352d6b1c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd8a7ea0eb3f0e2449376f55f6277cbf250e1fe9b12334ae10863a67403e1964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24fa51f85d2dad25dafaa1bc01b6fa4d04e38200dceb5a57f28b5fcac97dc2e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31bd83ed26e48907b284aba52462ec0e17a41af0ab24b310046c41d73234689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:28Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.800003 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:28Z is after 2025-08-24T17:21:41Z" Nov 21 
13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.819294 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:28Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.841357 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:28Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.856949 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:28Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.867357 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.867430 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.867449 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.867480 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.867497 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:28Z","lastTransitionTime":"2025-11-21T13:37:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.971268 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.971321 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.971338 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.971362 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.971381 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:28Z","lastTransitionTime":"2025-11-21T13:37:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.982229 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:28 crc kubenswrapper[4721]: E1121 13:37:28.982360 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.982588 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:28 crc kubenswrapper[4721]: E1121 13:37:28.982728 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:28 crc kubenswrapper[4721]: I1121 13:37:28.986481 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:28 crc kubenswrapper[4721]: E1121 13:37:28.986609 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.074613 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.074695 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.074758 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.074809 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.074833 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:29Z","lastTransitionTime":"2025-11-21T13:37:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.178003 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.178080 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.178094 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.178119 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.178131 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:29Z","lastTransitionTime":"2025-11-21T13:37:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.281547 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.281609 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.281622 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.281646 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.281660 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:29Z","lastTransitionTime":"2025-11-21T13:37:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.384698 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.384756 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.384767 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.384789 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.384803 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:29Z","lastTransitionTime":"2025-11-21T13:37:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.487843 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.487894 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.487905 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.487923 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.487935 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:29Z","lastTransitionTime":"2025-11-21T13:37:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.566573 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hp6ft_15670f4b-cd59-4040-b6d5-87a7cf51d63f/kube-multus/0.log" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.566728 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hp6ft" event={"ID":"15670f4b-cd59-4040-b6d5-87a7cf51d63f","Type":"ContainerStarted","Data":"03dde5164495d54b9fddd3dbe88c4af8074e67b53b0eb90d645625879caa39f8"} Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.588651 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:29Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.592209 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.592321 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.592378 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.592408 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.592431 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:29Z","lastTransitionTime":"2025-11-21T13:37:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.605614 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"ho
stIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:29Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.628331 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:29Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.662648 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"e-plugin_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.246:9443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ab0b1d51-5ec6-479b-8881-93dfa8d30337}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:37:06.827568 6452 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-console/networking-console-plugin]} name:Service_openshift-network-console/networking-console-plugin_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.246:9443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ab0b1d51-5ec6-479b-8881-93dfa8d30337}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:37:06.827807 6452 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1121 13:37:06.827895 6452 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:37:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-x6mh9_openshift-ovn-kubernetes(7db08ddd-c14b-40b0-a2fd-67f84d6e747a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:29Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.681859 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:29Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.695655 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.695727 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.695746 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.695777 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.695795 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:29Z","lastTransitionTime":"2025-11-21T13:37:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.697488 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:29Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.714358 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:29Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.738995 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:29Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.760164 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03dde5164495d54b9fddd3dbe88c4af8074e67b53b0eb90d645625879caa39f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:37:28Z\\\",\\\"message\\\":\\\"2025-11-21T13:36:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7b84d057-aee8-45a5-8c6b-b6c8df46ea49\\\\n2025-11-21T13:36:42+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7b84d057-aee8-45a5-8c6b-b6c8df46ea49 to /host/opt/cni/bin/\\\\n2025-11-21T13:36:43Z [verbose] multus-daemon started\\\\n2025-11-21T13:36:43Z [verbose] Readiness Indicator file check\\\\n2025-11-21T13:37:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:37:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:29Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.782210 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6363bd3bda49acccd90227648d0b7b6a5b6a3b133e159939a5cd49d6b44132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:29Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.799363 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.799415 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:29 crc 
kubenswrapper[4721]: I1121 13:37:29.799423 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.799441 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.799451 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:29Z","lastTransitionTime":"2025-11-21T13:37:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.803683 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:29Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.821756 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:29Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.842604 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0
b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:29Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.870164 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha2
56:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:29Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.890704 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:29Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.902768 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.902828 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.902849 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.902876 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.902896 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:29Z","lastTransitionTime":"2025-11-21T13:37:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.910457 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4b30e0e-0193-4468-8697-d77352d6b1c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd8a7ea0eb3f0e2449376f55f6277cbf250e1fe9b12334ae10863a67403e1964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24fa51f85d2dad25dafaa1bc01b6fa4d04e38200dceb5a57f28b5fcac97dc2e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31bd83ed26e48907b284aba52462ec0e17a41af0ab24b310046c41d73234689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:29Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.930865 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:29Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:29 crc kubenswrapper[4721]: I1121 13:37:29.980609 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:29 crc kubenswrapper[4721]: E1121 13:37:29.980986 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.007554 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.007916 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.008053 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.008425 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.008551 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:30Z","lastTransitionTime":"2025-11-21T13:37:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.112264 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.112328 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.112346 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.112371 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.112392 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:30Z","lastTransitionTime":"2025-11-21T13:37:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.216058 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.216150 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.216167 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.216186 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.216203 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:30Z","lastTransitionTime":"2025-11-21T13:37:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.319199 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.319286 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.319308 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.319336 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.319356 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:30Z","lastTransitionTime":"2025-11-21T13:37:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.423162 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.423293 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.423318 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.423347 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.423369 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:30Z","lastTransitionTime":"2025-11-21T13:37:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.526279 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.526788 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.526944 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.526990 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.527024 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:30Z","lastTransitionTime":"2025-11-21T13:37:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.630823 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.631412 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.631778 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.631978 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.632164 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:30Z","lastTransitionTime":"2025-11-21T13:37:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.735800 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.736299 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.736543 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.736734 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.736949 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:30Z","lastTransitionTime":"2025-11-21T13:37:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.840111 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.840468 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.840539 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.840604 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.840665 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:30Z","lastTransitionTime":"2025-11-21T13:37:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.943511 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.943850 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.943950 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.944041 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.944131 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:30Z","lastTransitionTime":"2025-11-21T13:37:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.981316 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.981459 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:30 crc kubenswrapper[4721]: I1121 13:37:30.981463 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:30 crc kubenswrapper[4721]: E1121 13:37:30.981617 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:30 crc kubenswrapper[4721]: E1121 13:37:30.981716 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:30 crc kubenswrapper[4721]: E1121 13:37:30.982374 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.000970 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.047584 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.048745 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.048969 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.049421 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.049787 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:31Z","lastTransitionTime":"2025-11-21T13:37:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.153876 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.154351 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.154619 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.154830 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.155059 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:31Z","lastTransitionTime":"2025-11-21T13:37:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.258500 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.258566 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.258578 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.258598 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.258613 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:31Z","lastTransitionTime":"2025-11-21T13:37:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.362572 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.362633 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.362650 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.362674 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.362691 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:31Z","lastTransitionTime":"2025-11-21T13:37:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.465665 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.465733 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.465747 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.465775 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.465792 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:31Z","lastTransitionTime":"2025-11-21T13:37:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.569046 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.569128 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.569141 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.569164 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.569181 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:31Z","lastTransitionTime":"2025-11-21T13:37:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.672759 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.672833 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.672850 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.672878 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.672899 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:31Z","lastTransitionTime":"2025-11-21T13:37:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.777587 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.777657 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.777681 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.777713 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.777732 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:31Z","lastTransitionTime":"2025-11-21T13:37:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.880730 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.880815 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.880834 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.880866 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.880894 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:31Z","lastTransitionTime":"2025-11-21T13:37:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.980836 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:31 crc kubenswrapper[4721]: E1121 13:37:31.981043 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.984374 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.984445 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.984472 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.984507 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:31 crc kubenswrapper[4721]: I1121 13:37:31.984533 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:31Z","lastTransitionTime":"2025-11-21T13:37:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.088370 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.088437 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.088457 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.088481 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.088503 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:32Z","lastTransitionTime":"2025-11-21T13:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.192175 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.192248 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.192269 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.192293 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.192310 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:32Z","lastTransitionTime":"2025-11-21T13:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.296314 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.296378 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.296395 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.296644 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.296666 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:32Z","lastTransitionTime":"2025-11-21T13:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.400410 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.400569 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.400589 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.400655 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.400672 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:32Z","lastTransitionTime":"2025-11-21T13:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.503944 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.504001 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.504017 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.504045 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.504094 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:32Z","lastTransitionTime":"2025-11-21T13:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.607483 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.607549 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.607567 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.607592 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.607615 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:32Z","lastTransitionTime":"2025-11-21T13:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.710671 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.710737 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.710754 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.710780 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.710801 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:32Z","lastTransitionTime":"2025-11-21T13:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.813821 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.813908 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.813933 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.813969 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.813993 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:32Z","lastTransitionTime":"2025-11-21T13:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.917324 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.917378 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.917393 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.917414 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.917430 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:32Z","lastTransitionTime":"2025-11-21T13:37:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.981312 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.981407 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:32 crc kubenswrapper[4721]: E1121 13:37:32.981519 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:32 crc kubenswrapper[4721]: I1121 13:37:32.981541 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:32 crc kubenswrapper[4721]: E1121 13:37:32.981678 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:32 crc kubenswrapper[4721]: E1121 13:37:32.981769 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.002152 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfc48d5e-7dac-40ef-b4d0-e513b0dba81e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://785574da7816b494bfa29c702e49fb8d054908c7643b58b9cab04443d7f4044c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f18b0646fc8b40fc42cda80064f163d7bcd20e97b02b12e956d03754af85263d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\
":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f18b0646fc8b40fc42cda80064f163d7bcd20e97b02b12e956d03754af85263d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:32Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.020919 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.020988 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.021011 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.021043 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.021110 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:33Z","lastTransitionTime":"2025-11-21T13:37:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.031453 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:33Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.049484 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:33Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.074457 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:33Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.108666 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f2a38d97633cabc204cdbaa666190c699d4409a
9645f3794de4c8bb3f7e7c4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"e-plugin_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.246:9443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ab0b1d51-5ec6-479b-8881-93dfa8d30337}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:37:06.827568 6452 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-console/networking-console-plugin]} name:Service_openshift-network-console/networking-console-plugin_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.246:9443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ab0b1d51-5ec6-479b-8881-93dfa8d30337}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:37:06.827807 6452 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1121 13:37:06.827895 6452 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:37:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-x6mh9_openshift-ovn-kubernetes(7db08ddd-c14b-40b0-a2fd-67f84d6e747a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:33Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.126021 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:33Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.127591 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.127661 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.127676 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.127703 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.127719 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:33Z","lastTransitionTime":"2025-11-21T13:37:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.143520 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:33Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.166131 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:33Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.189126 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03dde5164495d54b9fddd3dbe88c4af8074e67b53b0eb90d645625879caa39f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:37:28Z\\\",\\\"message\\\":\\\"2025-11-21T13:36:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7b84d057-aee8-45a5-8c6b-b6c8df46ea49\\\\n2025-11-21T13:36:42+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7b84d057-aee8-45a5-8c6b-b6c8df46ea49 to /host/opt/cni/bin/\\\\n2025-11-21T13:36:43Z [verbose] 
multus-daemon started\\\\n2025-11-21T13:36:43Z [verbose] Readiness Indicator file check\\\\n2025-11-21T13:37:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:37:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:33Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.213105 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6363bd3bda49acccd90227648d0b7b6a5b6a3b133e159939a5cd49d6b44132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:33Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.232057 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:33Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.235589 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.235635 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.235654 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.235677 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.235691 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:33Z","lastTransitionTime":"2025-11-21T13:37:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.256109 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:33Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.278135 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:33Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.301387 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4b30e0e-0193-4468-8697-d77352d6b1c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd8a7ea0eb3f0e2449376f55f6277cbf250e1fe9b12334ae10863a67403e1964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24fa51f85d2dad25dafaa1bc01b6fa4d04e38200dceb5a57f28b5fcac97dc2e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31bd83ed26e48907b284aba52462ec0e17a41af0ab24b310046c41d73234689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:33Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.319610 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:33Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.335285 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:33Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.338287 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.338309 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.338316 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.338344 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.338355 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:33Z","lastTransitionTime":"2025-11-21T13:37:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.348522 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:33Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.365481 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:
46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:33Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.441187 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.441234 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.441252 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.441277 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.441295 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:33Z","lastTransitionTime":"2025-11-21T13:37:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.544594 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.544636 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.544649 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.544667 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.544679 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:33Z","lastTransitionTime":"2025-11-21T13:37:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.647569 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.647634 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.647651 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.647675 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.647736 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:33Z","lastTransitionTime":"2025-11-21T13:37:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.751358 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.751421 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.751430 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.751452 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.751465 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:33Z","lastTransitionTime":"2025-11-21T13:37:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.855011 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.855053 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.855086 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.855103 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.855116 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:33Z","lastTransitionTime":"2025-11-21T13:37:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.958053 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.958154 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.958172 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.958198 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.958216 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:33Z","lastTransitionTime":"2025-11-21T13:37:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:33 crc kubenswrapper[4721]: I1121 13:37:33.981026 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:33 crc kubenswrapper[4721]: E1121 13:37:33.981263 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.061671 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.061722 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.061746 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.061777 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.061799 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:34Z","lastTransitionTime":"2025-11-21T13:37:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.164287 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.164361 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.164386 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.164415 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.164439 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:34Z","lastTransitionTime":"2025-11-21T13:37:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.267444 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.267517 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.267540 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.267572 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.267594 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:34Z","lastTransitionTime":"2025-11-21T13:37:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.370795 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.370843 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.370853 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.370869 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.370879 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:34Z","lastTransitionTime":"2025-11-21T13:37:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.473996 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.474119 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.474138 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.474167 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.474186 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:34Z","lastTransitionTime":"2025-11-21T13:37:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.577919 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.577994 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.578017 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.578047 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.578101 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:34Z","lastTransitionTime":"2025-11-21T13:37:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.680699 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.680769 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.680809 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.680843 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.680868 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:34Z","lastTransitionTime":"2025-11-21T13:37:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.783576 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.783639 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.783657 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.783686 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.783717 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:34Z","lastTransitionTime":"2025-11-21T13:37:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.886846 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.886911 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.886928 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.886952 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.886969 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:34Z","lastTransitionTime":"2025-11-21T13:37:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.980991 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 13:37:34 crc kubenswrapper[4721]: E1121 13:37:34.981341 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.981417 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.981434 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 13:37:34 crc kubenswrapper[4721]: E1121 13:37:34.981609 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 13:37:34 crc kubenswrapper[4721]: E1121 13:37:34.982413 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.982954 4721 scope.go:117] "RemoveContainer" containerID="1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b"
Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.993603 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.993672 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.993689 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.993718 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:34 crc kubenswrapper[4721]: I1121 13:37:34.993741 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:34Z","lastTransitionTime":"2025-11-21T13:37:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.097939 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.098566 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.098586 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.098619 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.098639 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:35Z","lastTransitionTime":"2025-11-21T13:37:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.202161 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.202265 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.202320 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.202353 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.202373 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:35Z","lastTransitionTime":"2025-11-21T13:37:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.305886 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.305968 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.305990 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.306089 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.306113 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:35Z","lastTransitionTime":"2025-11-21T13:37:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.408339 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.408400 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.408417 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.408441 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.408460 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:35Z","lastTransitionTime":"2025-11-21T13:37:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.510751 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.510800 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.510811 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.510828 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.510840 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:35Z","lastTransitionTime":"2025-11-21T13:37:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.591808 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovnkube-controller/2.log"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.595638 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerStarted","Data":"9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2"}
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.596215 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.614531 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.614600 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.614618 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.614646 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.614662 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:35Z","lastTransitionTime":"2025-11-21T13:37:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.618003 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:35Z is after 2025-08-24T17:21:41Z"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.637232 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03dde5164495d54b9fddd3dbe88c4af8074e67b53b0eb90d645625879caa39f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:37:28Z\\\",\\\"message\\\":\\\"2025-11-21T13:36:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7b84d057-aee8-45a5-8c6b-b6c8df46ea49\\\\n2025-11-21T13:36:42+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7b84d057-aee8-45a5-8c6b-b6c8df46ea49 to /host/opt/cni/bin/\\\\n2025-11-21T13:36:43Z [verbose] multus-daemon started\\\\n2025-11-21T13:36:43Z [verbose] Readiness Indicator file check\\\\n2025-11-21T13:37:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:37:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:35Z is after 2025-08-24T17:21:41Z"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.660482 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6363bd3bda49acccd90227648d0b7b6a5b6a3b133e159939a5cd49d6b44132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:35Z is after 2025-08-24T17:21:41Z"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.674254 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:35Z is after 2025-08-24T17:21:41Z"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.693147 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:35Z is after 2025-08-24T17:21:41Z"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.709378 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:35Z is after 2025-08-24T17:21:41Z"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.716996 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.717042 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.717054 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.717087 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.717101 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:35Z","lastTransitionTime":"2025-11-21T13:37:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.727598 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4b30e0e-0193-4468-8697-d77352d6b1c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd8a7ea0eb3f0e2449376f55f6277cbf250e1fe9b12334ae10863a67403e1964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24fa51f85d2dad25dafaa1bc01b6fa4d04e38200dceb5a57f28b5fcac97dc2e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31bd83ed26e48907b284aba52462ec0e17a41af0ab24b310046c41d73234689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:35Z is after 2025-08-24T17:21:41Z"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.744504 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:35Z is after 2025-08-24T17:21:41Z"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.762774 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:35Z is after 2025-08-24T17:21:41Z"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.781287 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:35Z is after 2025-08-24T17:21:41Z"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.803828 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:35Z is after 2025-08-24T17:21:41Z"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.820925 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.820991 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.821017 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.821053 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.821136 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:35Z","lastTransitionTime":"2025-11-21T13:37:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.838669 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:35Z is after 2025-08-24T17:21:41Z"
Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.862677 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:35Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.883938 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:35Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.903036 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfc48d5e-7dac-40ef-b4d0-e513b0dba81e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://785574da7816b494bfa29c702e49fb8d054908c7643b58b9cab04443d7f4044c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f18b0646fc8b40fc42cda80064f163d7bcd20e97b02b12e956d03754af85263d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f18b0646fc8b40fc42cda80064f163d7bcd20e97b02b12e956d03754af85263d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:35Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.923647 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.923683 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.923694 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.923708 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.923720 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:35Z","lastTransitionTime":"2025-11-21T13:37:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.938826 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ee2d18b96001914fa1b8d3d9702344f13d8f405
c8dfad442059010c135826f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"e-plugin_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.246:9443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ab0b1d51-5ec6-479b-8881-93dfa8d30337}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:37:06.827568 6452 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-console/networking-console-plugin]} name:Service_openshift-network-console/networking-console-plugin_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.246:9443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ab0b1d51-5ec6-479b-8881-93dfa8d30337}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:37:06.827807 6452 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1121 13:37:06.827895 6452 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:37:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:37:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:35Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.952689 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:35Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.964785 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:35Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:35 crc kubenswrapper[4721]: I1121 13:37:35.981228 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:35 crc kubenswrapper[4721]: E1121 13:37:35.981381 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.026838 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.026991 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.027004 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.027018 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.027028 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:36Z","lastTransitionTime":"2025-11-21T13:37:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.129960 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.130004 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.130013 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.130030 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.130041 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:36Z","lastTransitionTime":"2025-11-21T13:37:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.234188 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.234232 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.234245 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.234263 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.234274 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:36Z","lastTransitionTime":"2025-11-21T13:37:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.336409 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.336474 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.336492 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.336517 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.336539 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:36Z","lastTransitionTime":"2025-11-21T13:37:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.439037 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.439161 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.439187 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.439216 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.439240 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:36Z","lastTransitionTime":"2025-11-21T13:37:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.542618 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.542713 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.542737 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.542769 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.542791 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:36Z","lastTransitionTime":"2025-11-21T13:37:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.602833 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovnkube-controller/3.log" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.603869 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovnkube-controller/2.log" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.607689 4721 generic.go:334] "Generic (PLEG): container finished" podID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerID="9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2" exitCode=1 Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.607762 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerDied","Data":"9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2"} Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.607822 4721 scope.go:117] "RemoveContainer" containerID="1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.608970 4721 scope.go:117] "RemoveContainer" containerID="9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2" Nov 21 13:37:36 crc kubenswrapper[4721]: E1121 13:37:36.609359 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-x6mh9_openshift-ovn-kubernetes(7db08ddd-c14b-40b0-a2fd-67f84d6e747a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.632274 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:36Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.645696 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.645763 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.645788 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.645819 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.645844 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:36Z","lastTransitionTime":"2025-11-21T13:37:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.656581 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:36Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.677350 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:36Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.695876 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:36Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.719829 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:36Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.742550 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:36Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.751310 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.751373 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.751390 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.751416 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.751438 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:36Z","lastTransitionTime":"2025-11-21T13:37:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.765112 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4b30e0e-0193-4468-8697-d77352d6b1c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd8a7ea0eb3f0e2449376f55f6277cbf250e1fe9b12334ae10863a67403e1964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24fa51f85d2dad25dafaa1bc01b6fa4d04e38200dceb5a57f28b5fcac97dc2e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31bd83ed26e48907b284aba52462ec0e17a41af0ab24b310046c41d73234689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:36Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.782166 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11
\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:36Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.806183 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfc48d5e-7dac-40ef-b4d0-e513b0dba81e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://785574da7816b494bfa29c702e49fb8d054908c7643b58b9cab04443d7f4044c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f18b0646fc8b40fc42cda80064f163d7bcd20e97b02b12e956d03754af85263d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f18b0646fc8b40fc42cda80064f163d7bcd20e97b02b12e956d03754af85263d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\
\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:36Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.829686 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:36Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.849015 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:36Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.854578 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.854622 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.854633 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.854652 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.854667 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:36Z","lastTransitionTime":"2025-11-21T13:37:36Z","reason":"KubeletNotReady","message":"container runtime network not 
ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.881478 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f2a38d97633cabc204cdbaa666190c699d4409a9645f3794de4c8bb3f7e7c4b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:37:06Z\\\",\\\"message\\\":\\\"e-plugin_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.246:9443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ab0b1d51-5ec6-479b-8881-93dfa8d30337}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:37:06.827568 6452 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-network-console/networking-console-plugin]} name:Service_openshift-network-console/networking-console-plugin_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.246:9443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {ab0b1d51-5ec6-479b-8881-93dfa8d30337}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 13:37:06.827807 6452 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1121 13:37:06.827895 6452 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:37:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:37:36Z\\\",\\\"message\\\":\\\"l\\\\nI1121 13:37:36.358050 6796 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1121 13:37:36.358138 6796 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1121 13:37:36.358234 6796 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1121 13:37:36.358286 6796 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1121 13:37:36.358351 6796 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1121 13:37:36.358393 6796 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1121 13:37:36.358418 6796 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1121 13:37:36.358420 6796 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1121 13:37:36.358416 6796 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1121 13:37:36.358498 6796 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1121 13:37:36.358538 6796 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1121 13:37:36.358629 6796 factory.go:656] Stopping watch factory\\\\nI1121 13:37:36.358668 6796 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1121 13:37:36.358703 6796 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1121 13:37:36.358737 6796 handler.go:208] Removed *v1.Node event handler 2\\\\nI1121 13:37:36.358772 6796 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:37:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:36Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.897999 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:36Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.918189 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6363bd3bda49acccd90227648d0b7b6a5b6a3b133e159939a5cd49d6b44132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:36Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.932281 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:36Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.951570 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:36Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.958484 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.958772 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.958965 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.959164 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.959312 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:36Z","lastTransitionTime":"2025-11-21T13:37:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.972375 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:36Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.981174 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.981232 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.981265 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:36 crc kubenswrapper[4721]: E1121 13:37:36.981371 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:36 crc kubenswrapper[4721]: E1121 13:37:36.981587 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:36 crc kubenswrapper[4721]: E1121 13:37:36.981812 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:36 crc kubenswrapper[4721]: I1121 13:37:36.992915 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03dde5164495d54b9fddd3dbe88c4af8074e67b53b0eb90d645625879caa39f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:37:28Z\\\",\\\"message\\\":\\\"2025-11-21T13:36:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7b84d057-aee8-45a5-8c6b-b6c8df46ea49\\\\n2025-11-21T13:36:42+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7b84d057-aee8-45a5-8c6b-b6c8df46ea49 to /host/opt/cni/bin/\\\\n2025-11-21T13:36:43Z [verbose] multus-daemon started\\\\n2025-11-21T13:36:43Z [verbose] Readiness Indicator file check\\\\n2025-11-21T13:37:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:37:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:36Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.063182 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.063246 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.063267 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.063293 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.063315 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:37Z","lastTransitionTime":"2025-11-21T13:37:37Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.166458 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.166539 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.166561 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.166589 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.166639 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:37Z","lastTransitionTime":"2025-11-21T13:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.265382 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.265447 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.265463 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.265485 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.265500 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:37Z","lastTransitionTime":"2025-11-21T13:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:37 crc kubenswrapper[4721]: E1121 13:37:37.286119 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.290519 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.290561 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.290572 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.290591 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.290605 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:37Z","lastTransitionTime":"2025-11-21T13:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:37 crc kubenswrapper[4721]: E1121 13:37:37.305927 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.310847 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.310949 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.311012 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.311040 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.311156 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:37Z","lastTransitionTime":"2025-11-21T13:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:37 crc kubenswrapper[4721]: E1121 13:37:37.326334 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.331766 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.331824 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.331845 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.331870 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.331891 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:37Z","lastTransitionTime":"2025-11-21T13:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:37 crc kubenswrapper[4721]: E1121 13:37:37.347660 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.352977 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.353026 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.353038 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.353057 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.353098 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:37Z","lastTransitionTime":"2025-11-21T13:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:37 crc kubenswrapper[4721]: E1121 13:37:37.373293 4721 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"d6dd8ca9-77b6-40e9-b340-d759ee2aceeb\\\",\\\"systemUUID\\\":\\\"5536c4e5-1f28-40b2-9c33-e4b399b3bfd8\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: E1121 13:37:37.373648 4721 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.376182 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.376259 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.376286 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.376316 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.376337 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:37Z","lastTransitionTime":"2025-11-21T13:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.479339 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.479388 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.479406 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.479432 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.479449 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:37Z","lastTransitionTime":"2025-11-21T13:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.582760 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.582830 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.582847 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.582873 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.582894 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:37Z","lastTransitionTime":"2025-11-21T13:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.614101 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovnkube-controller/3.log" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.620908 4721 scope.go:117] "RemoveContainer" containerID="9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2" Nov 21 13:37:37 crc kubenswrapper[4721]: E1121 13:37:37.621533 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-x6mh9_openshift-ovn-kubernetes(7db08ddd-c14b-40b0-a2fd-67f84d6e747a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.638743 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-7gx9d" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b703fc8f-138e-4ba9-9e08-1e9d718e13e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40226b9777a42a16a2e71f2da617774e992c4ded4e638aee70fe3f62810b915e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td99q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:37Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-7gx9d\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.658438 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.682293 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.686052 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.686120 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.686130 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.686146 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.686158 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:37Z","lastTransitionTime":"2025-11-21T13:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.695847 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hp6ft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"15670f4b-cd59-4040-b6d5-87a7cf51d63f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:37:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03dde5164495d54b9fddd3dbe88c4af8074e67b53b0eb90d645625879caa39f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:37:28Z\\\",\\\"message\\\":\\\"2025-11-21T13:36:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7b84d057-aee8-45a5-8c6b-b6c8df46ea49\\\\n2025-11-21T13:36:42+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7b84d057-aee8-45a5-8c6b-b6c8df46ea49 to /host/opt/cni/bin/\\\\n2025-11-21T13:36:43Z [verbose] multus-daemon started\\\\n2025-11-21T13:36:43Z [verbose] Readiness Indicator file check\\\\n2025-11-21T13:37:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:37:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pwv9f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hp6ft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.716088 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"553d849b-ea1f-436e-aa8f-190203f6ab6e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e6363bd3bda49acccd90227648d0b7b6a5b6a3b133e159939a5cd49d6b44132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://49f9821b21909c08c1968fdb8ffa672069a8c7586766f102ab8e5afafd7bc303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://36285b776f40d1c5c62f53bfa16317ba64766831341863ca9ca8760a39c1fafa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9e6ca98ee57d585bb85108efa8958dc2847e0785d62e26d4a70d05cd3892415\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93cd13a40148cec3b50bb72c5bbc3b71ba2c9478377b748e8324d029883e5c58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46558cfba50a778b5235c791b7296f52927b412e8931e062369bf76207f9196e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3000b78f0eb71110c64aa000c46a3fdd13638ae9cf2de9b477ee38b1807491\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4bhjb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-x4hgm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.734830 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.749431 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe3943c5-1349-4275-bc45-25db418c7488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3208cfd8cdc15265b2a12398e81ee206e2db3af860f136950469664ec7060522\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9sbp6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-mstbk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.766683 4721 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2fc06731-28f7-44c2-8132-c4e624f894fb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://140f031d650ee6def458a1e9ad0a861172452c3e08a76c9c8ec8527885f479de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8860330410ddb8396cd17c2c088aab77af192db167c654195ef51d9cd3ec0a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4sh6t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:46Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5nx85\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.789207 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.789251 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.789270 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.789293 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.789309 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:37Z","lastTransitionTime":"2025-11-21T13:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.795189 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90e58e46-f038-423d-9d28-df20bd59a750\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dfbd112b4320fe78135ecc4efa8a162bcbb5882056847a2322ea1c12596c181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://354cdd4fddf5871781683f8edc515bcedac156dfdd984da175c222e661666a85\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.
io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f175410309a3f91ae94ff6043821c19bd0f327a68605c77d29341b22782e3335\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19faa9138f4a1c1804b9df992d69b12713754654dc3d79d2018b0295dca140fa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ede25ca6eaa503b938146f595af6531b255b2ee7823b5f87c58d364f393b3b8d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T13:36:28Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1121 13:36:22.670859 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 13:36:22.671710 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4026395510/tls.crt::/tmp/serving-cert-4026395510/tls.key\\\\\\\"\\\\nI1121 13:36:28.321163 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 13:36:28.340554 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 13:36:28.340606 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 13:36:28.340669 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 13:36:28.340684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 13:36:28.350305 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 13:36:28.350355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350369 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 13:36:28.350382 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 13:36:28.350394 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 13:36:28.350403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 13:36:28.350414 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 13:36:28.350803 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 13:36:28.355268 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:12Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://53dd4ae2a64199e72d3c69172ff01984ddb8f286f1d37752941b0df1f795c4c8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:09Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://696ed55fc8f831e36de14df6929433926fbb638384853ed6a72e31525926b760\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.818022 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d9d313ec-1bf9-421e-ae84-c928875cc48f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7583754571b6b2ac51c28ed14be499ee71f1d5177d76cb60ebbf6eeae94da9ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c859ab0077cdf996333153e5d45f5f9524e5c9a90ea7c7ffb0c9e8cb8aa62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e33e75fe55b0aa5082c0b3fc25fc580355f96d55da5e7fd822a0779a85dc0ccd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa037f2b533d4ebd6f790bfcfc296ddf654d33436d80e83dbd082de6fcbd355\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.837342 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4b30e0e-0193-4468-8697-d77352d6b1c0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd8a7ea0eb3f0e2449376f55f6277cbf250e1fe9b12334ae10863a67403e1964\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24fa51f85d2dad25dafaa1bc01b6fa4d04e38200dceb5a57f28b5fcac97dc2e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c31bd83ed26e48907b284aba52462ec0e17a41af0ab24b310046c41d73234689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f421102f28714ed60208d64fb106edbf6efaec351c7b3532a692e5c63fe12773\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.855652 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8bb58b8caf39c548b6f4d24b5bfb9493eaac28c5502a443a61d495b329ba0b35\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.871711 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cfc48d5e-7dac-40ef-b4d0-e513b0dba81e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://785574da7816b494bfa29c702e49fb8d054908c7643b58b9cab04443d7f4044c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f18b0646fc8b40fc42cda80064f163d7bcd20e97b02b12e956d03754af85263d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f18b0646fc8b40fc42cda80064f163d7bcd20e97b02b12e956d03754af85263d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.892305 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.892359 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.892375 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.892396 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.892413 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:37Z","lastTransitionTime":"2025-11-21T13:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.893229 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b698ab7e7ad318e2dcfce53f33eb4d762de82929d7243017d209c912b607968\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://637087436e1cf9ddb1cf3f397fd81e7db80e6b7b3bca84fed516eaf8ca02639e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"o
vnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.910029 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tv49z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4dd2533b-aa3f-44c0-aff1-3a13c844ad5a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6a779854e74e3c450f6b5b556ff727c9c8833a8aa48f96c705e6f6e0aea1fc4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cc54c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tv49z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.933949 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ed6957711572dc390d6d09128e0cc4fa24242dfed62d4fb6a3102ce443b92d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z" Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.967289 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ee2d18b96001914fa1b8d3d9702344f13d8f405
c8dfad442059010c135826f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T13:37:36Z\\\",\\\"message\\\":\\\"l\\\\nI1121 13:37:36.358050 6796 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1121 13:37:36.358138 6796 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1121 13:37:36.358234 6796 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1121 13:37:36.358286 6796 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1121 13:37:36.358351 6796 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1121 13:37:36.358393 6796 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1121 13:37:36.358418 6796 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1121 13:37:36.358420 6796 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1121 13:37:36.358416 6796 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1121 13:37:36.358498 6796 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1121 13:37:36.358538 6796 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1121 13:37:36.358629 6796 factory.go:656] Stopping watch factory\\\\nI1121 13:37:36.358668 6796 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1121 13:37:36.358703 6796 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1121 13:37:36.358737 6796 handler.go:208] Removed *v1.Node event handler 2\\\\nI1121 13:37:36.358772 6796 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T13:37:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-x6mh9_openshift-ovn-kubernetes(7db08ddd-c14b-40b0-a2fd-67f84d6e747a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T13:36:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T13:36:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T13:36:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xzjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:34Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x6mh9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z"
Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.981469 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv"
Nov 21 13:37:37 crc kubenswrapper[4721]: E1121 13:37:37.981677 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6"
Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.985689 4721 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e613396d-3bd5-4a6f-8f9a-507f40954ee6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T13:36:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-49gjj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T13:36:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sr6gv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T13:37:37Z is after 2025-08-24T17:21:41Z"
Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.994830 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.994910 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.994935 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.994964 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:37 crc kubenswrapper[4721]: I1121 13:37:37.994981 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:37Z","lastTransitionTime":"2025-11-21T13:37:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.098564 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.098633 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.098652 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.098678 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.098696 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:38Z","lastTransitionTime":"2025-11-21T13:37:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.201661 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.201724 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.201742 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.201767 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.201786 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:38Z","lastTransitionTime":"2025-11-21T13:37:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.305515 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.305571 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.305593 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.305624 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.305644 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:38Z","lastTransitionTime":"2025-11-21T13:37:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.408229 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.408296 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.408317 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.408342 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.408361 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:38Z","lastTransitionTime":"2025-11-21T13:37:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.511957 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.512023 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.512044 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.512109 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.512138 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:38Z","lastTransitionTime":"2025-11-21T13:37:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.615247 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.615332 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.615351 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.615378 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.615397 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:38Z","lastTransitionTime":"2025-11-21T13:37:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.718630 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.718694 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.718717 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.718745 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.718767 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:38Z","lastTransitionTime":"2025-11-21T13:37:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.779042 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
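The ovnkube-controller status earlier in this log shows restartCount 3 with "back-off 40s restarting failed container": the kubelet restarts a crashing container with an exponentially growing delay, documented as 10s doubling per restart and capped at five minutes, so the wait after the third crash is 10s·2^2 = 40s. A small Go sketch of that arithmetic, under those documented parameters:

package main

import (
	"fmt"
	"time"
)

// delay returns the kubelet-style restart delay after n crashes:
// 10s base, doubling per restart, capped at 5m (documented behavior).
func delay(n int) time.Duration {
	d := 10 * time.Second
	for i := 1; i < n; i++ {
		d *= 2
		if d >= 5*time.Minute {
			return 5 * time.Minute
		}
	}
	return d
}

func main() {
	for n := 1; n <= 6; n++ {
		fmt.Printf("after crash %d: wait %s\n", n, delay(n))
	}
	// after crash 3: wait 40s, matching the CrashLoopBackOff message above.
}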
Nov 21 13:37:38 crc kubenswrapper[4721]: E1121 13:37:38.779303 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:42.779266548 +0000 UTC m=+160.757157836 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.822433 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.822524 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.822546 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.822575 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.822601 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:38Z","lastTransitionTime":"2025-11-21T13:37:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.880505 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.880583 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.880661 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.880706 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 13:37:38 crc kubenswrapper[4721]: E1121 13:37:38.880879 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Nov 21 13:37:38 crc kubenswrapper[4721]: E1121 13:37:38.880905 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Nov 21 13:37:38 crc kubenswrapper[4721]: E1121 13:37:38.880923 4721 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 21 13:37:38 crc kubenswrapper[4721]: E1121 13:37:38.880989 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-21 13:38:42.880966372 +0000 UTC m=+160.858857670 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 21 13:37:38 crc kubenswrapper[4721]: E1121 13:37:38.881359 4721 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Nov 21 13:37:38 crc kubenswrapper[4721]: E1121 13:37:38.881436 4721 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Nov 21 13:37:38 crc kubenswrapper[4721]: E1121 13:37:38.881441 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 13:38:42.881417854 +0000 UTC m=+160.859309152 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Nov 21 13:37:38 crc kubenswrapper[4721]: E1121 13:37:38.881574 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 13:38:42.881511587 +0000 UTC m=+160.859402885 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Nov 21 13:37:38 crc kubenswrapper[4721]: E1121 13:37:38.881373 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Nov 21 13:37:38 crc kubenswrapper[4721]: E1121 13:37:38.881629 4721 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Nov 21 13:37:38 crc kubenswrapper[4721]: E1121 13:37:38.881652 4721 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 21 13:37:38 crc kubenswrapper[4721]: E1121 13:37:38.881713 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-21 13:38:42.881693482 +0000 UTC m=+160.859584790 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.925373 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.925448 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.925471 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.925502 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.925524 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:38Z","lastTransitionTime":"2025-11-21T13:37:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.980747 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
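Each failed mount or unmount above is rescheduled with durationBeforeRetry 1m4s, i.e. 64s, which is consistent with an exponential backoff that starts at 500ms and doubles per consecutive failure (0.5s·2^7 = 64s). The exact base and cap are assumptions here, used only to illustrate the shape visible in the log:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Assumed parameters: 500ms initial delay, doubling per failure,
	// capped at 2m2s; 1m4s matches the eighth consecutive failure.
	delay := 500 * time.Millisecond
	maxDelay := 2*time.Minute + 2*time.Second
	for failure := 1; failure <= 9; failure++ {
		fmt.Printf("failure %d -> durationBeforeRetry %s\n", failure, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}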
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.980788 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 13:37:38 crc kubenswrapper[4721]: E1121 13:37:38.980901 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 13:37:38 crc kubenswrapper[4721]: I1121 13:37:38.981178 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 13:37:38 crc kubenswrapper[4721]: E1121 13:37:38.981270 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 13:37:38 crc kubenswrapper[4721]: E1121 13:37:38.981380 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.027879 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.028296 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.028331 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.028360 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.028385 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:39Z","lastTransitionTime":"2025-11-21T13:37:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.132128 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.132263 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.132289 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.132320 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.132344 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:39Z","lastTransitionTime":"2025-11-21T13:37:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.236622 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.236709 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.236727 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.236753 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.236771 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:39Z","lastTransitionTime":"2025-11-21T13:37:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.340758 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.340839 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.340858 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.340891 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.340913 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:39Z","lastTransitionTime":"2025-11-21T13:37:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.444452 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.444518 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.444536 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.444562 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.444581 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:39Z","lastTransitionTime":"2025-11-21T13:37:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.548323 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.548427 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.548448 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.548475 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.548492 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:39Z","lastTransitionTime":"2025-11-21T13:37:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.651411 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.651477 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.651493 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.651509 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.651522 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:39Z","lastTransitionTime":"2025-11-21T13:37:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.755183 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.755260 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.755278 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.755304 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.755324 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:39Z","lastTransitionTime":"2025-11-21T13:37:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.858213 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.858274 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.858291 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.858318 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.858335 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:39Z","lastTransitionTime":"2025-11-21T13:37:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.961058 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.961168 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.961186 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.961211 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
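Every setters.go:603 record in this stretch embeds the node's Ready condition as inline JSON. When sifting a journal dump like this one, a small decoder makes the transitions greppable; the struct below simply mirrors the fields printed in the log and is not the kubelet's own type:

package main

import (
	"encoding/json"
	"fmt"
	"log"
	"time"
)

// nodeCondition matches the condition={...} payload logged above.
type nodeCondition struct {
	Type               string    `json:"type"`
	Status             string    `json:"status"`
	LastHeartbeatTime  time.Time `json:"lastHeartbeatTime"`
	LastTransitionTime time.Time `json:"lastTransitionTime"`
	Reason             string    `json:"reason"`
	Message            string    `json:"message"`
}

func main() {
	// Payload shortened from a setters.go:603 line above.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:39Z","lastTransitionTime":"2025-11-21T13:37:39Z","reason":"KubeletNotReady","message":"container runtime network not ready"}`
	var c nodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%s=%s since %s (%s)\n", c.Type, c.Status, c.LastTransitionTime.Format(time.RFC3339), c.Reason)
}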
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.961231 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:39Z","lastTransitionTime":"2025-11-21T13:37:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:39 crc kubenswrapper[4721]: I1121 13:37:39.981538 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv"
Nov 21 13:37:39 crc kubenswrapper[4721]: E1121 13:37:39.981719 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.002152 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"]
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.066272 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.067157 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.067486 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.067694 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.067755 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:40Z","lastTransitionTime":"2025-11-21T13:37:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.171456 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.171533 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.171550 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.171631 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.171658 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:40Z","lastTransitionTime":"2025-11-21T13:37:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.275591 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.275649 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.275665 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.275690 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.275707 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:40Z","lastTransitionTime":"2025-11-21T13:37:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.378936 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.378999 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.379016 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.379039 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.379056 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:40Z","lastTransitionTime":"2025-11-21T13:37:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.481577 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.481656 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.481680 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.481704 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.481721 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:40Z","lastTransitionTime":"2025-11-21T13:37:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.584678 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.584737 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.584849 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.584883 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.584903 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:40Z","lastTransitionTime":"2025-11-21T13:37:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.687831 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.687880 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.687892 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.687909 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.687921 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:40Z","lastTransitionTime":"2025-11-21T13:37:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.791442 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.791513 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.791534 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.791565 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.791587 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:40Z","lastTransitionTime":"2025-11-21T13:37:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.894860 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.894923 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.894946 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.894972 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.894989 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:40Z","lastTransitionTime":"2025-11-21T13:37:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.981509 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.981582 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.981515 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 13:37:40 crc kubenswrapper[4721]: E1121 13:37:40.981712 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 13:37:40 crc kubenswrapper[4721]: E1121 13:37:40.981831 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
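All of the NetworkPluginNotReady noise in this section reduces to one observable fact: nothing has written a CNI configuration into /etc/kubernetes/cni/net.d/, because the ovnkube-controller container is crash-looping. A quick hypothetical check for that directory (path taken verbatim from the error message) is sketched below:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Directory from the "no CNI configuration file" error above.
	dir := "/etc/kubernetes/cni/net.d"
	// Matches both .conf and .conflist files.
	matches, err := filepath.Glob(filepath.Join(dir, "*.conf*"))
	if err != nil {
		fmt.Println("glob:", err)
		os.Exit(1)
	}
	if len(matches) == 0 {
		fmt.Println("no CNI configuration found; the network plugin has not started")
		return
	}
	for _, m := range matches {
		fmt.Println("found CNI config:", m)
	}
}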
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.997753 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.997808 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.997826 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.997854 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:40 crc kubenswrapper[4721]: I1121 13:37:40.997872 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:40Z","lastTransitionTime":"2025-11-21T13:37:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.101622 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.101718 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.101788 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.101835 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.101853 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:41Z","lastTransitionTime":"2025-11-21T13:37:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.205308 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.205379 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.205397 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.205420 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.205438 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:41Z","lastTransitionTime":"2025-11-21T13:37:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.308564 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.308635 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.308660 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.308692 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.308715 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:41Z","lastTransitionTime":"2025-11-21T13:37:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.412308 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.412372 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.412390 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.412418 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.412436 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:41Z","lastTransitionTime":"2025-11-21T13:37:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.515812 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.515895 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.515921 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.515953 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.515978 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:41Z","lastTransitionTime":"2025-11-21T13:37:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.619162 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.619220 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.619237 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.619259 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.619276 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:41Z","lastTransitionTime":"2025-11-21T13:37:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.723115 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.723151 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.723161 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.723179 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.723189 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:41Z","lastTransitionTime":"2025-11-21T13:37:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.826172 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.826267 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.826300 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.826333 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.826354 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:41Z","lastTransitionTime":"2025-11-21T13:37:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.928960 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.929028 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.929099 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.929131 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.929152 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:41Z","lastTransitionTime":"2025-11-21T13:37:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:41 crc kubenswrapper[4721]: I1121 13:37:41.981629 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:41 crc kubenswrapper[4721]: E1121 13:37:41.982030 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.032629 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.032735 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.032754 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.032781 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.032799 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:42Z","lastTransitionTime":"2025-11-21T13:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.135871 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.135929 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.135950 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.135978 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.135996 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:42Z","lastTransitionTime":"2025-11-21T13:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.238356 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.238410 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.238425 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.238444 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.238463 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:42Z","lastTransitionTime":"2025-11-21T13:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.342361 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.342443 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.342472 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.342504 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.342535 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:42Z","lastTransitionTime":"2025-11-21T13:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.445559 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.445602 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.445618 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.445641 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.445658 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:42Z","lastTransitionTime":"2025-11-21T13:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.548622 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.548658 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.548673 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.548694 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.548710 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:42Z","lastTransitionTime":"2025-11-21T13:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.651738 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.651796 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.651817 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.651843 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.651862 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:42Z","lastTransitionTime":"2025-11-21T13:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.755320 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.755420 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.755440 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.755466 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.755484 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:42Z","lastTransitionTime":"2025-11-21T13:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.858844 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.858909 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.858927 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.858954 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.858978 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:42Z","lastTransitionTime":"2025-11-21T13:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.963180 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.963252 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.963270 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.963296 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.963312 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:42Z","lastTransitionTime":"2025-11-21T13:37:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.981551 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.981683 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:42 crc kubenswrapper[4721]: I1121 13:37:42.981904 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:42 crc kubenswrapper[4721]: E1121 13:37:42.982003 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:42 crc kubenswrapper[4721]: E1121 13:37:42.982229 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:42 crc kubenswrapper[4721]: E1121 13:37:42.981875 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.057389 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-7gx9d" podStartSLOduration=70.057360217 podStartE2EDuration="1m10.057360217s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:37:43.03540777 +0000 UTC m=+101.013299078" watchObservedRunningTime="2025-11-21 13:37:43.057360217 +0000 UTC m=+101.035251515" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.067262 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.067596 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.067796 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.067984 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.068213 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:43Z","lastTransitionTime":"2025-11-21T13:37:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.139670 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-hp6ft" podStartSLOduration=70.139641124 podStartE2EDuration="1m10.139641124s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:37:43.108144287 +0000 UTC m=+101.086035635" watchObservedRunningTime="2025-11-21 13:37:43.139641124 +0000 UTC m=+101.117532412" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.169796 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-x4hgm" podStartSLOduration=70.16977073300001 podStartE2EDuration="1m10.169770733s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:37:43.140513737 +0000 UTC m=+101.118405035" watchObservedRunningTime="2025-11-21 13:37:43.169770733 +0000 UTC m=+101.147662031" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.170901 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.170968 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.170986 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.171022 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.171100 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:43Z","lastTransitionTime":"2025-11-21T13:37:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.206962 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podStartSLOduration=70.206938403 podStartE2EDuration="1m10.206938403s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:37:43.189637183 +0000 UTC m=+101.167528481" watchObservedRunningTime="2025-11-21 13:37:43.206938403 +0000 UTC m=+101.184829671" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.248033 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5nx85" podStartSLOduration=70.248005029 podStartE2EDuration="1m10.248005029s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:37:43.207849928 +0000 UTC m=+101.185741216" watchObservedRunningTime="2025-11-21 13:37:43.248005029 +0000 UTC m=+101.225896327" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.274140 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=62.274116869 podStartE2EDuration="1m2.274116869s" podCreationTimestamp="2025-11-21 13:36:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:37:43.249177081 +0000 UTC m=+101.227068349" watchObservedRunningTime="2025-11-21 13:37:43.274116869 +0000 UTC m=+101.252008167" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.274374 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.274423 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.274435 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.274455 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.274468 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:43Z","lastTransitionTime":"2025-11-21T13:37:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.291679 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=69.291656776 podStartE2EDuration="1m9.291656776s" podCreationTimestamp="2025-11-21 13:36:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:37:43.275114136 +0000 UTC m=+101.253005424" watchObservedRunningTime="2025-11-21 13:37:43.291656776 +0000 UTC m=+101.269548044" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.311110 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=47.311089934 podStartE2EDuration="47.311089934s" podCreationTimestamp="2025-11-21 13:36:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:37:43.292456018 +0000 UTC m=+101.270347286" watchObservedRunningTime="2025-11-21 13:37:43.311089934 +0000 UTC m=+101.288981202" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.326900 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=13.326878403 podStartE2EDuration="13.326878403s" podCreationTimestamp="2025-11-21 13:37:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:37:43.326426841 +0000 UTC m=+101.304318109" watchObservedRunningTime="2025-11-21 13:37:43.326878403 +0000 UTC m=+101.304769701" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.359981 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=4.359957423 podStartE2EDuration="4.359957423s" podCreationTimestamp="2025-11-21 13:37:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:37:43.359677615 +0000 UTC m=+101.337568883" watchObservedRunningTime="2025-11-21 13:37:43.359957423 +0000 UTC m=+101.337848711" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.377891 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.377948 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.377964 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.377989 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.378007 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:43Z","lastTransitionTime":"2025-11-21T13:37:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.409562 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-tv49z" podStartSLOduration=70.40953307 podStartE2EDuration="1m10.40953307s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:37:43.408532283 +0000 UTC m=+101.386423551" watchObservedRunningTime="2025-11-21 13:37:43.40953307 +0000 UTC m=+101.387424368" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.480670 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.480736 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.480753 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.480779 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.480799 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:43Z","lastTransitionTime":"2025-11-21T13:37:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.583203 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.583593 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.583613 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.583639 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.583660 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:43Z","lastTransitionTime":"2025-11-21T13:37:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.686643 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.687309 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.687371 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.687401 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.687420 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:43Z","lastTransitionTime":"2025-11-21T13:37:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.790548 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.790614 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.790632 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.790659 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.790678 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:43Z","lastTransitionTime":"2025-11-21T13:37:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.894231 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.894293 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.894313 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.894339 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.894357 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:43Z","lastTransitionTime":"2025-11-21T13:37:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.980662 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:43 crc kubenswrapper[4721]: E1121 13:37:43.980872 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.997236 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.997294 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.997315 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.997340 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:43 crc kubenswrapper[4721]: I1121 13:37:43.997358 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:43Z","lastTransitionTime":"2025-11-21T13:37:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.100732 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.100795 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.100806 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.100834 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.100851 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:44Z","lastTransitionTime":"2025-11-21T13:37:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.203928 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.204004 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.204023 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.204056 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.204111 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:44Z","lastTransitionTime":"2025-11-21T13:37:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.307468 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.307538 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.307554 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.307580 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.307598 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:44Z","lastTransitionTime":"2025-11-21T13:37:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.410444 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.410513 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.410531 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.410558 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.410577 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:44Z","lastTransitionTime":"2025-11-21T13:37:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.513754 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.513822 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.513843 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.513870 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.513888 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:44Z","lastTransitionTime":"2025-11-21T13:37:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.616656 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.616734 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.616756 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.616787 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.616808 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:44Z","lastTransitionTime":"2025-11-21T13:37:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.719987 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.720049 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.720109 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.720137 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.720153 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:44Z","lastTransitionTime":"2025-11-21T13:37:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.824153 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.824223 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.824242 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.824296 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.824330 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:44Z","lastTransitionTime":"2025-11-21T13:37:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.927549 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.927609 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.927624 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.927649 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.927666 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:44Z","lastTransitionTime":"2025-11-21T13:37:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.981496 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.981632 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:44 crc kubenswrapper[4721]: I1121 13:37:44.981464 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:44 crc kubenswrapper[4721]: E1121 13:37:44.981708 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:44 crc kubenswrapper[4721]: E1121 13:37:44.981836 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:44 crc kubenswrapper[4721]: E1121 13:37:44.981928 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.030528 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.030583 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.030605 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.030630 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.030648 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:45Z","lastTransitionTime":"2025-11-21T13:37:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.135238 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.135321 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.135341 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.135366 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.135389 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:45Z","lastTransitionTime":"2025-11-21T13:37:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.238842 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.238940 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.238960 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.238983 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.239001 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:45Z","lastTransitionTime":"2025-11-21T13:37:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.344937 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.345331 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.345471 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.345623 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.345764 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:45Z","lastTransitionTime":"2025-11-21T13:37:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.449447 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.449846 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.449986 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.450157 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.450312 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:45Z","lastTransitionTime":"2025-11-21T13:37:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.553688 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.553772 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.553793 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.553827 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.553864 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:45Z","lastTransitionTime":"2025-11-21T13:37:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.659527 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.659579 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.659595 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.659619 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.659636 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:45Z","lastTransitionTime":"2025-11-21T13:37:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.763488 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.763596 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.763625 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.763664 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.763697 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:45Z","lastTransitionTime":"2025-11-21T13:37:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.866222 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.866293 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.866316 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.866348 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.866370 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:45Z","lastTransitionTime":"2025-11-21T13:37:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.970140 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.970224 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.970248 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.970287 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.970311 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:45Z","lastTransitionTime":"2025-11-21T13:37:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:45 crc kubenswrapper[4721]: I1121 13:37:45.981704 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:45 crc kubenswrapper[4721]: E1121 13:37:45.981907 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.074252 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.074308 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.074324 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.074348 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.074366 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:46Z","lastTransitionTime":"2025-11-21T13:37:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.178652 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.178696 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.178713 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.178738 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.178755 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:46Z","lastTransitionTime":"2025-11-21T13:37:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.281240 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.281299 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.281317 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.281342 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.281360 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:46Z","lastTransitionTime":"2025-11-21T13:37:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.384302 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.384361 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.384378 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.384401 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.384421 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:46Z","lastTransitionTime":"2025-11-21T13:37:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.487517 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.487569 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.487586 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.487611 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.487630 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:46Z","lastTransitionTime":"2025-11-21T13:37:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.591174 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.591237 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.591256 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.591283 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.591304 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:46Z","lastTransitionTime":"2025-11-21T13:37:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.694600 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.694682 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.694710 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.694745 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.694769 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:46Z","lastTransitionTime":"2025-11-21T13:37:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.797998 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.798045 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.798058 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.798105 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.798117 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:46Z","lastTransitionTime":"2025-11-21T13:37:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.901191 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.901253 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.901270 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.901294 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.901311 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:46Z","lastTransitionTime":"2025-11-21T13:37:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.981669 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.981761 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:46 crc kubenswrapper[4721]: E1121 13:37:46.981918 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:46 crc kubenswrapper[4721]: E1121 13:37:46.982135 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:46 crc kubenswrapper[4721]: I1121 13:37:46.981973 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:46 crc kubenswrapper[4721]: E1121 13:37:46.982322 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.004983 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.005059 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.005117 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.005146 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.005170 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:47Z","lastTransitionTime":"2025-11-21T13:37:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.108532 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.108599 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.108619 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.108648 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.108667 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:47Z","lastTransitionTime":"2025-11-21T13:37:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.211967 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.212026 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.212044 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.212104 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.212125 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:47Z","lastTransitionTime":"2025-11-21T13:37:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.315035 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.315130 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.315149 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.315175 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.315201 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:47Z","lastTransitionTime":"2025-11-21T13:37:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.419237 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.419346 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.419774 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.419851 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.420281 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:47Z","lastTransitionTime":"2025-11-21T13:37:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.539568 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.539639 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.539662 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.539689 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.539709 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:47Z","lastTransitionTime":"2025-11-21T13:37:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.643279 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.643336 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.643352 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.643375 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.643392 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:47Z","lastTransitionTime":"2025-11-21T13:37:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.746175 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.746489 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.746636 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.746819 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.747006 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:47Z","lastTransitionTime":"2025-11-21T13:37:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.763273 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.763345 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.763363 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.763812 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.763877 4721 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T13:37:47Z","lastTransitionTime":"2025-11-21T13:37:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.845313 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p"] Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.846400 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.850588 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.853442 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.853732 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.853787 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.981169 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:47 crc kubenswrapper[4721]: E1121 13:37:47.981341 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.999016 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/05853838-156e-482c-a8e8-b26e062da79a-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-wp66p\" (UID: \"05853838-156e-482c-a8e8-b26e062da79a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.999175 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/05853838-156e-482c-a8e8-b26e062da79a-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-wp66p\" (UID: \"05853838-156e-482c-a8e8-b26e062da79a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.999228 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/05853838-156e-482c-a8e8-b26e062da79a-service-ca\") pod \"cluster-version-operator-5c965bbfc6-wp66p\" (UID: \"05853838-156e-482c-a8e8-b26e062da79a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.999273 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/05853838-156e-482c-a8e8-b26e062da79a-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-wp66p\" (UID: \"05853838-156e-482c-a8e8-b26e062da79a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p" Nov 21 13:37:47 crc kubenswrapper[4721]: I1121 13:37:47.999338 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05853838-156e-482c-a8e8-b26e062da79a-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-wp66p\" (UID: \"05853838-156e-482c-a8e8-b26e062da79a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p" Nov 21 13:37:48 crc kubenswrapper[4721]: I1121 13:37:48.101627 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/05853838-156e-482c-a8e8-b26e062da79a-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-wp66p\" (UID: \"05853838-156e-482c-a8e8-b26e062da79a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p" Nov 21 13:37:48 crc kubenswrapper[4721]: I1121 13:37:48.101708 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/05853838-156e-482c-a8e8-b26e062da79a-service-ca\") pod \"cluster-version-operator-5c965bbfc6-wp66p\" (UID: \"05853838-156e-482c-a8e8-b26e062da79a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p" Nov 21 13:37:48 crc kubenswrapper[4721]: I1121 13:37:48.101751 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/05853838-156e-482c-a8e8-b26e062da79a-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-wp66p\" (UID: 
\"05853838-156e-482c-a8e8-b26e062da79a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p" Nov 21 13:37:48 crc kubenswrapper[4721]: I1121 13:37:48.101798 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05853838-156e-482c-a8e8-b26e062da79a-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-wp66p\" (UID: \"05853838-156e-482c-a8e8-b26e062da79a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p" Nov 21 13:37:48 crc kubenswrapper[4721]: I1121 13:37:48.101828 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/05853838-156e-482c-a8e8-b26e062da79a-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-wp66p\" (UID: \"05853838-156e-482c-a8e8-b26e062da79a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p" Nov 21 13:37:48 crc kubenswrapper[4721]: I1121 13:37:48.102004 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/05853838-156e-482c-a8e8-b26e062da79a-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-wp66p\" (UID: \"05853838-156e-482c-a8e8-b26e062da79a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p" Nov 21 13:37:48 crc kubenswrapper[4721]: I1121 13:37:48.102209 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/05853838-156e-482c-a8e8-b26e062da79a-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-wp66p\" (UID: \"05853838-156e-482c-a8e8-b26e062da79a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p" Nov 21 13:37:48 crc kubenswrapper[4721]: I1121 13:37:48.103471 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/05853838-156e-482c-a8e8-b26e062da79a-service-ca\") pod \"cluster-version-operator-5c965bbfc6-wp66p\" (UID: \"05853838-156e-482c-a8e8-b26e062da79a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p" Nov 21 13:37:48 crc kubenswrapper[4721]: I1121 13:37:48.116531 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/05853838-156e-482c-a8e8-b26e062da79a-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-wp66p\" (UID: \"05853838-156e-482c-a8e8-b26e062da79a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p" Nov 21 13:37:48 crc kubenswrapper[4721]: I1121 13:37:48.133589 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/05853838-156e-482c-a8e8-b26e062da79a-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-wp66p\" (UID: \"05853838-156e-482c-a8e8-b26e062da79a\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p" Nov 21 13:37:48 crc kubenswrapper[4721]: I1121 13:37:48.176532 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p" Nov 21 13:37:48 crc kubenswrapper[4721]: W1121 13:37:48.199162 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod05853838_156e_482c_a8e8_b26e062da79a.slice/crio-07796b05e6c77076582900cc674d923937a86f5a3c6f56916f9f312f488f1a9b WatchSource:0}: Error finding container 07796b05e6c77076582900cc674d923937a86f5a3c6f56916f9f312f488f1a9b: Status 404 returned error can't find the container with id 07796b05e6c77076582900cc674d923937a86f5a3c6f56916f9f312f488f1a9b Nov 21 13:37:48 crc kubenswrapper[4721]: I1121 13:37:48.663476 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p" event={"ID":"05853838-156e-482c-a8e8-b26e062da79a","Type":"ContainerStarted","Data":"da11ff23291c527f900cea759938609908feb272ec1adefd204ceb8b8ffcf84e"} Nov 21 13:37:48 crc kubenswrapper[4721]: I1121 13:37:48.663544 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p" event={"ID":"05853838-156e-482c-a8e8-b26e062da79a","Type":"ContainerStarted","Data":"07796b05e6c77076582900cc674d923937a86f5a3c6f56916f9f312f488f1a9b"} Nov 21 13:37:48 crc kubenswrapper[4721]: I1121 13:37:48.686599 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-wp66p" podStartSLOduration=75.686563315 podStartE2EDuration="1m15.686563315s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:37:48.686203015 +0000 UTC m=+106.664094343" watchObservedRunningTime="2025-11-21 13:37:48.686563315 +0000 UTC m=+106.664454613" Nov 21 13:37:48 crc kubenswrapper[4721]: I1121 13:37:48.981157 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:48 crc kubenswrapper[4721]: I1121 13:37:48.981216 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:48 crc kubenswrapper[4721]: I1121 13:37:48.981169 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:48 crc kubenswrapper[4721]: E1121 13:37:48.981389 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:48 crc kubenswrapper[4721]: E1121 13:37:48.981530 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:48 crc kubenswrapper[4721]: E1121 13:37:48.981734 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:49 crc kubenswrapper[4721]: I1121 13:37:49.981109 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:49 crc kubenswrapper[4721]: E1121 13:37:49.981801 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:50 crc kubenswrapper[4721]: I1121 13:37:50.981568 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:50 crc kubenswrapper[4721]: I1121 13:37:50.981667 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:50 crc kubenswrapper[4721]: I1121 13:37:50.981666 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:50 crc kubenswrapper[4721]: E1121 13:37:50.981769 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:50 crc kubenswrapper[4721]: E1121 13:37:50.982099 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:50 crc kubenswrapper[4721]: E1121 13:37:50.981942 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:51 crc kubenswrapper[4721]: I1121 13:37:51.839129 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs\") pod \"network-metrics-daemon-sr6gv\" (UID: \"e613396d-3bd5-4a6f-8f9a-507f40954ee6\") " pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:51 crc kubenswrapper[4721]: E1121 13:37:51.839278 4721 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 13:37:51 crc kubenswrapper[4721]: E1121 13:37:51.839346 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs podName:e613396d-3bd5-4a6f-8f9a-507f40954ee6 nodeName:}" failed. No retries permitted until 2025-11-21 13:38:55.839328305 +0000 UTC m=+173.817219573 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs") pod "network-metrics-daemon-sr6gv" (UID: "e613396d-3bd5-4a6f-8f9a-507f40954ee6") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 13:37:51 crc kubenswrapper[4721]: I1121 13:37:51.981042 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:51 crc kubenswrapper[4721]: E1121 13:37:51.981271 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:51 crc kubenswrapper[4721]: I1121 13:37:51.982459 4721 scope.go:117] "RemoveContainer" containerID="9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2" Nov 21 13:37:51 crc kubenswrapper[4721]: E1121 13:37:51.982749 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-x6mh9_openshift-ovn-kubernetes(7db08ddd-c14b-40b0-a2fd-67f84d6e747a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" Nov 21 13:37:52 crc kubenswrapper[4721]: I1121 13:37:52.980878 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:52 crc kubenswrapper[4721]: I1121 13:37:52.981045 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:52 crc kubenswrapper[4721]: E1121 13:37:52.983095 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:52 crc kubenswrapper[4721]: I1121 13:37:52.983149 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:52 crc kubenswrapper[4721]: E1121 13:37:52.983511 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:52 crc kubenswrapper[4721]: E1121 13:37:52.983313 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:53 crc kubenswrapper[4721]: I1121 13:37:53.981158 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:53 crc kubenswrapper[4721]: E1121 13:37:53.981579 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:54 crc kubenswrapper[4721]: I1121 13:37:54.980725 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:54 crc kubenswrapper[4721]: I1121 13:37:54.980801 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:54 crc kubenswrapper[4721]: E1121 13:37:54.980888 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:54 crc kubenswrapper[4721]: E1121 13:37:54.981195 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:54 crc kubenswrapper[4721]: I1121 13:37:54.981493 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:54 crc kubenswrapper[4721]: E1121 13:37:54.982229 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:55 crc kubenswrapper[4721]: I1121 13:37:55.980662 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:55 crc kubenswrapper[4721]: E1121 13:37:55.980863 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:56 crc kubenswrapper[4721]: I1121 13:37:56.981230 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:56 crc kubenswrapper[4721]: I1121 13:37:56.981343 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:56 crc kubenswrapper[4721]: E1121 13:37:56.981423 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:56 crc kubenswrapper[4721]: I1121 13:37:56.981457 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:56 crc kubenswrapper[4721]: E1121 13:37:56.981651 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:56 crc kubenswrapper[4721]: E1121 13:37:56.981764 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:57 crc kubenswrapper[4721]: I1121 13:37:57.981272 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:57 crc kubenswrapper[4721]: E1121 13:37:57.981791 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:37:58 crc kubenswrapper[4721]: I1121 13:37:58.981584 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:37:58 crc kubenswrapper[4721]: I1121 13:37:58.981649 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:37:58 crc kubenswrapper[4721]: E1121 13:37:58.981839 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:37:58 crc kubenswrapper[4721]: I1121 13:37:58.981868 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:37:58 crc kubenswrapper[4721]: E1121 13:37:58.982036 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:37:58 crc kubenswrapper[4721]: E1121 13:37:58.982208 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:37:59 crc kubenswrapper[4721]: I1121 13:37:59.981507 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:37:59 crc kubenswrapper[4721]: E1121 13:37:59.981903 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:38:00 crc kubenswrapper[4721]: I1121 13:38:00.981535 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:38:00 crc kubenswrapper[4721]: I1121 13:38:00.981599 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:38:00 crc kubenswrapper[4721]: E1121 13:38:00.981724 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:38:00 crc kubenswrapper[4721]: I1121 13:38:00.981775 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:38:00 crc kubenswrapper[4721]: E1121 13:38:00.982209 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:38:00 crc kubenswrapper[4721]: E1121 13:38:00.982007 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:38:01 crc kubenswrapper[4721]: I1121 13:38:01.981007 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:38:01 crc kubenswrapper[4721]: E1121 13:38:01.981306 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:38:02 crc kubenswrapper[4721]: I1121 13:38:02.981582 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:38:02 crc kubenswrapper[4721]: I1121 13:38:02.982368 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:38:02 crc kubenswrapper[4721]: I1121 13:38:02.982376 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:38:02 crc kubenswrapper[4721]: E1121 13:38:02.983367 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:38:02 crc kubenswrapper[4721]: E1121 13:38:02.983489 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:38:02 crc kubenswrapper[4721]: E1121 13:38:02.983683 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:38:03 crc kubenswrapper[4721]: E1121 13:38:03.011979 4721 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Nov 21 13:38:03 crc kubenswrapper[4721]: E1121 13:38:03.152800 4721 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 21 13:38:03 crc kubenswrapper[4721]: I1121 13:38:03.980833 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:38:03 crc kubenswrapper[4721]: E1121 13:38:03.981120 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:38:04 crc kubenswrapper[4721]: I1121 13:38:04.980718 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:38:04 crc kubenswrapper[4721]: E1121 13:38:04.980927 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:38:04 crc kubenswrapper[4721]: I1121 13:38:04.981046 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:38:04 crc kubenswrapper[4721]: I1121 13:38:04.981031 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:38:04 crc kubenswrapper[4721]: E1121 13:38:04.981293 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:38:04 crc kubenswrapper[4721]: E1121 13:38:04.981907 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:38:04 crc kubenswrapper[4721]: I1121 13:38:04.982426 4721 scope.go:117] "RemoveContainer" containerID="9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2" Nov 21 13:38:04 crc kubenswrapper[4721]: E1121 13:38:04.982690 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-x6mh9_openshift-ovn-kubernetes(7db08ddd-c14b-40b0-a2fd-67f84d6e747a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" Nov 21 13:38:05 crc kubenswrapper[4721]: I1121 13:38:05.981363 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:38:05 crc kubenswrapper[4721]: E1121 13:38:05.981624 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:38:06 crc kubenswrapper[4721]: I1121 13:38:06.981446 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:38:06 crc kubenswrapper[4721]: I1121 13:38:06.981519 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:38:06 crc kubenswrapper[4721]: I1121 13:38:06.981460 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:38:06 crc kubenswrapper[4721]: E1121 13:38:06.981818 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:38:06 crc kubenswrapper[4721]: E1121 13:38:06.981876 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:38:06 crc kubenswrapper[4721]: E1121 13:38:06.981940 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:38:07 crc kubenswrapper[4721]: I1121 13:38:07.980928 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:38:07 crc kubenswrapper[4721]: E1121 13:38:07.981950 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:38:08 crc kubenswrapper[4721]: E1121 13:38:08.154082 4721 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 21 13:38:08 crc kubenswrapper[4721]: I1121 13:38:08.981454 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:38:08 crc kubenswrapper[4721]: E1121 13:38:08.981664 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:38:08 crc kubenswrapper[4721]: I1121 13:38:08.982326 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:38:08 crc kubenswrapper[4721]: I1121 13:38:08.982385 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:38:08 crc kubenswrapper[4721]: E1121 13:38:08.982467 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:38:08 crc kubenswrapper[4721]: E1121 13:38:08.982550 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:38:09 crc kubenswrapper[4721]: I1121 13:38:09.980802 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:38:09 crc kubenswrapper[4721]: E1121 13:38:09.981040 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:38:10 crc kubenswrapper[4721]: I1121 13:38:10.981249 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:38:10 crc kubenswrapper[4721]: I1121 13:38:10.981303 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:38:10 crc kubenswrapper[4721]: I1121 13:38:10.981361 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:38:10 crc kubenswrapper[4721]: E1121 13:38:10.981480 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:38:10 crc kubenswrapper[4721]: E1121 13:38:10.981619 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:38:10 crc kubenswrapper[4721]: E1121 13:38:10.981702 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:38:11 crc kubenswrapper[4721]: I1121 13:38:11.980964 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:38:11 crc kubenswrapper[4721]: E1121 13:38:11.981209 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:38:12 crc kubenswrapper[4721]: I1121 13:38:12.981180 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:38:12 crc kubenswrapper[4721]: I1121 13:38:12.981281 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:38:12 crc kubenswrapper[4721]: E1121 13:38:12.981399 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:38:12 crc kubenswrapper[4721]: I1121 13:38:12.981554 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:38:12 crc kubenswrapper[4721]: E1121 13:38:12.983726 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:38:12 crc kubenswrapper[4721]: E1121 13:38:12.983856 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:38:13 crc kubenswrapper[4721]: E1121 13:38:13.155153 4721 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 21 13:38:13 crc kubenswrapper[4721]: I1121 13:38:13.981286 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:38:13 crc kubenswrapper[4721]: E1121 13:38:13.981550 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:38:14 crc kubenswrapper[4721]: I1121 13:38:14.968508 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hp6ft_15670f4b-cd59-4040-b6d5-87a7cf51d63f/kube-multus/1.log" Nov 21 13:38:14 crc kubenswrapper[4721]: I1121 13:38:14.969487 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hp6ft_15670f4b-cd59-4040-b6d5-87a7cf51d63f/kube-multus/0.log" Nov 21 13:38:14 crc kubenswrapper[4721]: I1121 13:38:14.969709 4721 generic.go:334] "Generic (PLEG): container finished" podID="15670f4b-cd59-4040-b6d5-87a7cf51d63f" containerID="03dde5164495d54b9fddd3dbe88c4af8074e67b53b0eb90d645625879caa39f8" exitCode=1 Nov 21 13:38:14 crc kubenswrapper[4721]: I1121 13:38:14.969871 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hp6ft" event={"ID":"15670f4b-cd59-4040-b6d5-87a7cf51d63f","Type":"ContainerDied","Data":"03dde5164495d54b9fddd3dbe88c4af8074e67b53b0eb90d645625879caa39f8"} Nov 21 13:38:14 crc kubenswrapper[4721]: I1121 13:38:14.970003 4721 scope.go:117] "RemoveContainer" containerID="1dbe335474d2338bc63abe174e92b991a4c6f82b642326c7ed0ea5fe7777eb69" Nov 21 13:38:14 crc kubenswrapper[4721]: I1121 13:38:14.970996 4721 scope.go:117] "RemoveContainer" containerID="03dde5164495d54b9fddd3dbe88c4af8074e67b53b0eb90d645625879caa39f8" Nov 21 13:38:14 crc kubenswrapper[4721]: E1121 13:38:14.971378 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-hp6ft_openshift-multus(15670f4b-cd59-4040-b6d5-87a7cf51d63f)\"" pod="openshift-multus/multus-hp6ft" podUID="15670f4b-cd59-4040-b6d5-87a7cf51d63f" Nov 21 13:38:14 crc kubenswrapper[4721]: I1121 13:38:14.981696 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:38:14 crc kubenswrapper[4721]: E1121 13:38:14.981921 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:38:14 crc kubenswrapper[4721]: I1121 13:38:14.982382 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:38:14 crc kubenswrapper[4721]: I1121 13:38:14.982550 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:38:14 crc kubenswrapper[4721]: E1121 13:38:14.982694 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:38:14 crc kubenswrapper[4721]: E1121 13:38:14.982791 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:38:15 crc kubenswrapper[4721]: I1121 13:38:15.978394 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hp6ft_15670f4b-cd59-4040-b6d5-87a7cf51d63f/kube-multus/1.log" Nov 21 13:38:15 crc kubenswrapper[4721]: I1121 13:38:15.980998 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:38:15 crc kubenswrapper[4721]: E1121 13:38:15.981293 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:38:16 crc kubenswrapper[4721]: I1121 13:38:16.980642 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:38:16 crc kubenswrapper[4721]: E1121 13:38:16.980867 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:38:16 crc kubenswrapper[4721]: I1121 13:38:16.981151 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:38:16 crc kubenswrapper[4721]: I1121 13:38:16.981221 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:38:16 crc kubenswrapper[4721]: E1121 13:38:16.981389 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:38:16 crc kubenswrapper[4721]: E1121 13:38:16.981517 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:38:17 crc kubenswrapper[4721]: I1121 13:38:17.980910 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:38:17 crc kubenswrapper[4721]: E1121 13:38:17.981121 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:38:17 crc kubenswrapper[4721]: I1121 13:38:17.982445 4721 scope.go:117] "RemoveContainer" containerID="9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2" Nov 21 13:38:18 crc kubenswrapper[4721]: E1121 13:38:18.156650 4721 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 21 13:38:18 crc kubenswrapper[4721]: I1121 13:38:18.965518 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-sr6gv"] Nov 21 13:38:18 crc kubenswrapper[4721]: I1121 13:38:18.965624 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:38:18 crc kubenswrapper[4721]: E1121 13:38:18.965718 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:38:18 crc kubenswrapper[4721]: I1121 13:38:18.980621 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:38:18 crc kubenswrapper[4721]: I1121 13:38:18.980687 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:38:18 crc kubenswrapper[4721]: E1121 13:38:18.980737 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:38:18 crc kubenswrapper[4721]: I1121 13:38:18.980690 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:38:18 crc kubenswrapper[4721]: E1121 13:38:18.980880 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:38:18 crc kubenswrapper[4721]: E1121 13:38:18.981201 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:38:18 crc kubenswrapper[4721]: I1121 13:38:18.992273 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovnkube-controller/3.log" Nov 21 13:38:19 crc kubenswrapper[4721]: I1121 13:38:19.001628 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerStarted","Data":"2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980"} Nov 21 13:38:19 crc kubenswrapper[4721]: I1121 13:38:19.002214 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:38:19 crc kubenswrapper[4721]: I1121 13:38:19.028705 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" podStartSLOduration=106.028672482 podStartE2EDuration="1m46.028672482s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:19.027924568 +0000 UTC m=+137.005815836" watchObservedRunningTime="2025-11-21 13:38:19.028672482 +0000 UTC m=+137.006563740" Nov 21 13:38:20 crc kubenswrapper[4721]: I1121 13:38:20.981507 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:38:20 crc kubenswrapper[4721]: I1121 13:38:20.981554 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:38:20 crc kubenswrapper[4721]: E1121 13:38:20.981742 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:38:20 crc kubenswrapper[4721]: I1121 13:38:20.981796 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:38:20 crc kubenswrapper[4721]: I1121 13:38:20.981851 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:38:20 crc kubenswrapper[4721]: E1121 13:38:20.982008 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:38:20 crc kubenswrapper[4721]: E1121 13:38:20.982212 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:38:20 crc kubenswrapper[4721]: E1121 13:38:20.982295 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:38:22 crc kubenswrapper[4721]: I1121 13:38:22.980879 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:38:22 crc kubenswrapper[4721]: I1121 13:38:22.980984 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:38:22 crc kubenswrapper[4721]: E1121 13:38:22.981167 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:38:22 crc kubenswrapper[4721]: I1121 13:38:22.981255 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:38:22 crc kubenswrapper[4721]: I1121 13:38:22.981325 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:38:22 crc kubenswrapper[4721]: E1121 13:38:22.983349 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:38:22 crc kubenswrapper[4721]: E1121 13:38:22.983472 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:38:22 crc kubenswrapper[4721]: E1121 13:38:22.983499 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:38:23 crc kubenswrapper[4721]: E1121 13:38:23.158200 4721 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 21 13:38:24 crc kubenswrapper[4721]: I1121 13:38:24.980825 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:38:24 crc kubenswrapper[4721]: I1121 13:38:24.980905 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:38:24 crc kubenswrapper[4721]: I1121 13:38:24.980910 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:38:24 crc kubenswrapper[4721]: I1121 13:38:24.980977 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:38:24 crc kubenswrapper[4721]: E1121 13:38:24.980978 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:38:24 crc kubenswrapper[4721]: E1121 13:38:24.981230 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:38:24 crc kubenswrapper[4721]: E1121 13:38:24.981254 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:38:24 crc kubenswrapper[4721]: E1121 13:38:24.981313 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:38:25 crc kubenswrapper[4721]: I1121 13:38:25.981341 4721 scope.go:117] "RemoveContainer" containerID="03dde5164495d54b9fddd3dbe88c4af8074e67b53b0eb90d645625879caa39f8" Nov 21 13:38:26 crc kubenswrapper[4721]: I1121 13:38:26.981249 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:38:26 crc kubenswrapper[4721]: I1121 13:38:26.981329 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:38:26 crc kubenswrapper[4721]: I1121 13:38:26.981372 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:38:26 crc kubenswrapper[4721]: E1121 13:38:26.981664 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 13:38:26 crc kubenswrapper[4721]: I1121 13:38:26.981765 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:38:26 crc kubenswrapper[4721]: E1121 13:38:26.981954 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sr6gv" podUID="e613396d-3bd5-4a6f-8f9a-507f40954ee6" Nov 21 13:38:26 crc kubenswrapper[4721]: E1121 13:38:26.982349 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 13:38:26 crc kubenswrapper[4721]: E1121 13:38:26.982436 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 13:38:27 crc kubenswrapper[4721]: I1121 13:38:27.035846 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hp6ft_15670f4b-cd59-4040-b6d5-87a7cf51d63f/kube-multus/1.log" Nov 21 13:38:27 crc kubenswrapper[4721]: I1121 13:38:27.035918 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hp6ft" event={"ID":"15670f4b-cd59-4040-b6d5-87a7cf51d63f","Type":"ContainerStarted","Data":"7b044bf3d42e21f5a67f6c3b085ed6017d26c6a42d604dbf526eeb6573630466"} Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.775433 4721 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.846217 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-xhzsg"] Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.847566 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.854800 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl"] Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.855548 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.880158 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.891875 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.900094 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.907373 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.907457 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.907631 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.910010 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9"] Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.910630 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.911803 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.911818 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.913653 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.913656 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.913709 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.913931 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.914178 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.915628 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.924719 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-wrvwq"] Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.925500 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-wrvwq" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.929397 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.929398 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.937473 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-262dd"] Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.938043 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-262dd" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.961135 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-audit\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.961226 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-audit-policies\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.961270 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-trusted-ca-bundle\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.961305 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-encryption-config\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.961330 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.961357 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-encryption-config\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.961416 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-config\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.961447 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppjss\" (UniqueName: \"kubernetes.io/projected/180dc212-b361-48e7-ad3e-70328538c4c6-kube-api-access-ppjss\") pod \"route-controller-manager-6576b87f9c-r49kl\" (UID: \"180dc212-b361-48e7-ad3e-70328538c4c6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.961477 4721 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/180dc212-b361-48e7-ad3e-70328538c4c6-config\") pod \"route-controller-manager-6576b87f9c-r49kl\" (UID: \"180dc212-b361-48e7-ad3e-70328538c4c6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.961504 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-image-import-ca\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.961545 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-node-pullsecrets\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.961637 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-etcd-client\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.961742 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/180dc212-b361-48e7-ad3e-70328538c4c6-client-ca\") pod \"route-controller-manager-6576b87f9c-r49kl\" (UID: \"180dc212-b361-48e7-ad3e-70328538c4c6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.961775 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/180dc212-b361-48e7-ad3e-70328538c4c6-serving-cert\") pod \"route-controller-manager-6576b87f9c-r49kl\" (UID: \"180dc212-b361-48e7-ad3e-70328538c4c6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.961831 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-serving-cert\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.961895 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kgnk\" (UniqueName: \"kubernetes.io/projected/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-kube-api-access-2kgnk\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.961962 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-audit-dir\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.961991 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4s6f2\" (UniqueName: \"kubernetes.io/projected/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-kube-api-access-4s6f2\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.962152 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-etcd-serving-ca\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.962188 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-etcd-client\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.962244 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-serving-cert\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.962272 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.962300 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-audit-dir\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.976057 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.976576 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dmhsg"] Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.977276 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.980996 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.981368 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.981861 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.982845 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.988532 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.988572 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.988629 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.988688 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.988770 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.988887 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.989032 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.989105 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.989206 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.989254 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.988557 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.989386 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.989501 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.989744 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.989804 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Nov 21 13:38:28 crc 
kubenswrapper[4721]: I1121 13:38:28.989862 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.989806 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.989920 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.989943 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.990000 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.989040 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.990216 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.990307 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.991108 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.993107 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rr4mf"] Nov 21 13:38:28 crc kubenswrapper[4721]: I1121 13:38:28.993699 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rr4mf" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:28.998178 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-gjhwn"] Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.009247 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-vvz5j"] Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.016639 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-gjhwn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.043786 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.044456 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.044570 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.044888 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.044905 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.045011 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.045104 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.045204 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.045555 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.045598 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.046114 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.050261 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.064418 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.064655 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.064722 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.064817 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.065028 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.065040 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.065933 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" 
(UniqueName: \"kubernetes.io/configmap/180dc212-b361-48e7-ad3e-70328538c4c6-client-ca\") pod \"route-controller-manager-6576b87f9c-r49kl\" (UID: \"180dc212-b361-48e7-ad3e-70328538c4c6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.065997 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ac0251e7-5184-49b4-84ae-ab058632718c-console-oauth-config\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " pod="openshift-console/console-f9d7485db-gjhwn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066055 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/786088ab-7a61-4923-9dbd-56ffac4d3230-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-rr4mf\" (UID: \"786088ab-7a61-4923-9dbd-56ffac4d3230\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rr4mf" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066123 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-serving-cert\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066159 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/180dc212-b361-48e7-ad3e-70328538c4c6-serving-cert\") pod \"route-controller-manager-6576b87f9c-r49kl\" (UID: \"180dc212-b361-48e7-ad3e-70328538c4c6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066191 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-audit-dir\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066226 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4s6f2\" (UniqueName: \"kubernetes.io/projected/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-kube-api-access-4s6f2\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066275 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kgnk\" (UniqueName: \"kubernetes.io/projected/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-kube-api-access-2kgnk\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066328 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac0251e7-5184-49b4-84ae-ab058632718c-trusted-ca-bundle\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " 
pod="openshift-console/console-f9d7485db-gjhwn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066369 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-etcd-serving-ca\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066396 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7f127fb-2ee2-4c82-b5c7-1d59743c9141-config\") pod \"machine-approver-56656f9798-262dd\" (UID: \"e7f127fb-2ee2-4c82-b5c7-1d59743c9141\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-262dd" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066419 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e7f127fb-2ee2-4c82-b5c7-1d59743c9141-auth-proxy-config\") pod \"machine-approver-56656f9798-262dd\" (UID: \"e7f127fb-2ee2-4c82-b5c7-1d59743c9141\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-262dd" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066441 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-etcd-client\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066465 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-serving-cert\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066492 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066518 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-audit-dir\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066547 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ac0251e7-5184-49b4-84ae-ab058632718c-oauth-serving-cert\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " pod="openshift-console/console-f9d7485db-gjhwn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066571 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/786088ab-7a61-4923-9dbd-56ffac4d3230-config\") pod \"openshift-apiserver-operator-796bbdcf4f-rr4mf\" (UID: \"786088ab-7a61-4923-9dbd-56ffac4d3230\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rr4mf" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066602 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/305d9e46-16ee-4444-a4af-c58ac4c014f2-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-dmhsg\" (UID: \"305d9e46-16ee-4444-a4af-c58ac4c014f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066599 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066626 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ac0251e7-5184-49b4-84ae-ab058632718c-service-ca\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " pod="openshift-console/console-f9d7485db-gjhwn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066655 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-audit\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066688 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e21e239-6ef9-41e5-bc03-dbf7a441fda7-service-ca-bundle\") pod \"authentication-operator-69f744f599-wrvwq\" (UID: \"6e21e239-6ef9-41e5-bc03-dbf7a441fda7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wrvwq" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066711 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrqsz\" (UniqueName: \"kubernetes.io/projected/786088ab-7a61-4923-9dbd-56ffac4d3230-kube-api-access-rrqsz\") pod \"openshift-apiserver-operator-796bbdcf4f-rr4mf\" (UID: \"786088ab-7a61-4923-9dbd-56ffac4d3230\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rr4mf" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066740 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-audit-policies\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066768 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/305d9e46-16ee-4444-a4af-c58ac4c014f2-config\") pod \"controller-manager-879f6c89f-dmhsg\" (UID: \"305d9e46-16ee-4444-a4af-c58ac4c014f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066792 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/305d9e46-16ee-4444-a4af-c58ac4c014f2-serving-cert\") pod \"controller-manager-879f6c89f-dmhsg\" (UID: \"305d9e46-16ee-4444-a4af-c58ac4c014f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066818 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e21e239-6ef9-41e5-bc03-dbf7a441fda7-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-wrvwq\" (UID: \"6e21e239-6ef9-41e5-bc03-dbf7a441fda7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wrvwq" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066852 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-trusted-ca-bundle\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066877 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066899 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ac0251e7-5184-49b4-84ae-ab058632718c-console-config\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " pod="openshift-console/console-f9d7485db-gjhwn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066925 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-encryption-config\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066949 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e21e239-6ef9-41e5-bc03-dbf7a441fda7-serving-cert\") pod \"authentication-operator-69f744f599-wrvwq\" (UID: \"6e21e239-6ef9-41e5-bc03-dbf7a441fda7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wrvwq" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.066979 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-encryption-config\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.067003 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-config\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " 
pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.067057 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ac0251e7-5184-49b4-84ae-ab058632718c-console-serving-cert\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " pod="openshift-console/console-f9d7485db-gjhwn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.067131 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppjss\" (UniqueName: \"kubernetes.io/projected/180dc212-b361-48e7-ad3e-70328538c4c6-kube-api-access-ppjss\") pod \"route-controller-manager-6576b87f9c-r49kl\" (UID: \"180dc212-b361-48e7-ad3e-70328538c4c6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.067157 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-image-import-ca\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.067178 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/180dc212-b361-48e7-ad3e-70328538c4c6-config\") pod \"route-controller-manager-6576b87f9c-r49kl\" (UID: \"180dc212-b361-48e7-ad3e-70328538c4c6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.067200 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/e7f127fb-2ee2-4c82-b5c7-1d59743c9141-machine-approver-tls\") pod \"machine-approver-56656f9798-262dd\" (UID: \"e7f127fb-2ee2-4c82-b5c7-1d59743c9141\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-262dd" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.067224 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-node-pullsecrets\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.067242 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9xpx\" (UniqueName: \"kubernetes.io/projected/305d9e46-16ee-4444-a4af-c58ac4c014f2-kube-api-access-t9xpx\") pod \"controller-manager-879f6c89f-dmhsg\" (UID: \"305d9e46-16ee-4444-a4af-c58ac4c014f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.067260 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lf7fz\" (UniqueName: \"kubernetes.io/projected/ac0251e7-5184-49b4-84ae-ab058632718c-kube-api-access-lf7fz\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " pod="openshift-console/console-f9d7485db-gjhwn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.067277 4721 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zbcd\" (UniqueName: \"kubernetes.io/projected/e7f127fb-2ee2-4c82-b5c7-1d59743c9141-kube-api-access-9zbcd\") pod \"machine-approver-56656f9798-262dd\" (UID: \"e7f127fb-2ee2-4c82-b5c7-1d59743c9141\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-262dd" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.067308 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/305d9e46-16ee-4444-a4af-c58ac4c014f2-client-ca\") pod \"controller-manager-879f6c89f-dmhsg\" (UID: \"305d9e46-16ee-4444-a4af-c58ac4c014f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.067326 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nc4r5\" (UniqueName: \"kubernetes.io/projected/6e21e239-6ef9-41e5-bc03-dbf7a441fda7-kube-api-access-nc4r5\") pod \"authentication-operator-69f744f599-wrvwq\" (UID: \"6e21e239-6ef9-41e5-bc03-dbf7a441fda7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wrvwq" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.067353 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-etcd-client\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.067389 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e21e239-6ef9-41e5-bc03-dbf7a441fda7-config\") pod \"authentication-operator-69f744f599-wrvwq\" (UID: \"6e21e239-6ef9-41e5-bc03-dbf7a441fda7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wrvwq" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.067886 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-config\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.068776 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-audit-policies\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.069408 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.070276 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-crzdf"] Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.070950 4721 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v"] Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.071902 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg"] Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.072352 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-fnzlv"] Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.072670 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6lrzd"] Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.073127 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6lrzd" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.074048 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-vvz5j" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.074351 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.074610 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.074962 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.075204 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.075541 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.075873 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.076239 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.077802 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.077940 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/180dc212-b361-48e7-ad3e-70328538c4c6-client-ca\") pod \"route-controller-manager-6576b87f9c-r49kl\" (UID: \"180dc212-b361-48e7-ad3e-70328538c4c6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.078056 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.084149 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-node-pullsecrets\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.084254 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl"] Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.084308 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-tcjm6"] Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.084625 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-audit-dir\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.085999 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-etcd-serving-ca\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.086922 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.087598 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-image-import-ca\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.087806 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/180dc212-b361-48e7-ad3e-70328538c4c6-config\") pod \"route-controller-manager-6576b87f9c-r49kl\" (UID: 
\"180dc212-b361-48e7-ad3e-70328538c4c6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.087905 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-audit-dir\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.088295 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.088667 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.088823 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.088992 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.089096 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-tcjm6" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.089206 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.089320 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.089368 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.089333 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.089510 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.089588 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.089662 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.089721 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.089761 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.089779 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.089801 4721 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication"/"v4-0-config-system-cliconfig" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.089856 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.089873 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.089885 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.089957 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.089990 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.090204 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.090241 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.090703 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.091476 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-etcd-client\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.092114 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.092603 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.096103 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.096444 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.098951 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/180dc212-b361-48e7-ad3e-70328538c4c6-serving-cert\") pod \"route-controller-manager-6576b87f9c-r49kl\" (UID: \"180dc212-b361-48e7-ad3e-70328538c4c6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.103278 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-audit\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 
13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.103603 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-encryption-config\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.104010 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.105636 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-serving-cert\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.106403 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.121713 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-encryption-config\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.122026 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-etcd-client\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.122024 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-serving-cert\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.123227 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.127438 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-trusted-ca-bundle\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.128459 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.128508 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.128586 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.128820 4721 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-api"/"machine-api-operator-images" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.128997 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.145756 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4s6f2\" (UniqueName: \"kubernetes.io/projected/85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a-kube-api-access-4s6f2\") pod \"apiserver-7bbb656c7d-nbcn9\" (UID: \"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.146581 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.148190 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.150262 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-j4d5j"] Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.151166 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-xhzsg"] Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.151239 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kgnk\" (UniqueName: \"kubernetes.io/projected/3f75b560-06f9-4c2d-94f0-64ae3d351bfa-kube-api-access-2kgnk\") pod \"apiserver-76f77b778f-xhzsg\" (UID: \"3f75b560-06f9-4c2d-94f0-64ae3d351bfa\") " pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.151257 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68"] Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.151464 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.152581 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.153637 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-jv2w5"] Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.154519 4721 util.go:30] "No sandbox for pod can be found. 
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.155271 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.156705 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-26t29"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.157397 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppjss\" (UniqueName: \"kubernetes.io/projected/180dc212-b361-48e7-ad3e-70328538c4c6-kube-api-access-ppjss\") pod \"route-controller-manager-6576b87f9c-r49kl\" (UID: \"180dc212-b361-48e7-ad3e-70328538c4c6\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.157798 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-rr29s"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.157897 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-26t29"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.158362 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.158365 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-rr29s"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.158465 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.158526 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.158833 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzjfq"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.159824 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzjfq"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.160246 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4s85z"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.160678 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4s85z"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.162218 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pgrjc"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.162768 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pgrjc"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.165566 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.167403 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v8v5q"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.167868 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v8v5q"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168007 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac0251e7-5184-49b4-84ae-ab058632718c-trusted-ca-bundle\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " pod="openshift-console/console-f9d7485db-gjhwn"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168038 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7f127fb-2ee2-4c82-b5c7-1d59743c9141-config\") pod \"machine-approver-56656f9798-262dd\" (UID: \"e7f127fb-2ee2-4c82-b5c7-1d59743c9141\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-262dd"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168078 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e7f127fb-2ee2-4c82-b5c7-1d59743c9141-auth-proxy-config\") pod \"machine-approver-56656f9798-262dd\" (UID: \"e7f127fb-2ee2-4c82-b5c7-1d59743c9141\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-262dd"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168097 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ac0251e7-5184-49b4-84ae-ab058632718c-oauth-serving-cert\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " pod="openshift-console/console-f9d7485db-gjhwn"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168117 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/786088ab-7a61-4923-9dbd-56ffac4d3230-config\") pod \"openshift-apiserver-operator-796bbdcf4f-rr4mf\" (UID: \"786088ab-7a61-4923-9dbd-56ffac4d3230\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rr4mf"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168139 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/305d9e46-16ee-4444-a4af-c58ac4c014f2-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-dmhsg\" (UID: \"305d9e46-16ee-4444-a4af-c58ac4c014f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168174 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ac0251e7-5184-49b4-84ae-ab058632718c-service-ca\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " pod="openshift-console/console-f9d7485db-gjhwn"
pod="openshift-console/console-f9d7485db-gjhwn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168198 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e21e239-6ef9-41e5-bc03-dbf7a441fda7-service-ca-bundle\") pod \"authentication-operator-69f744f599-wrvwq\" (UID: \"6e21e239-6ef9-41e5-bc03-dbf7a441fda7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wrvwq" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168223 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/305d9e46-16ee-4444-a4af-c58ac4c014f2-config\") pod \"controller-manager-879f6c89f-dmhsg\" (UID: \"305d9e46-16ee-4444-a4af-c58ac4c014f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168239 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrqsz\" (UniqueName: \"kubernetes.io/projected/786088ab-7a61-4923-9dbd-56ffac4d3230-kube-api-access-rrqsz\") pod \"openshift-apiserver-operator-796bbdcf4f-rr4mf\" (UID: \"786088ab-7a61-4923-9dbd-56ffac4d3230\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rr4mf" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168255 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/305d9e46-16ee-4444-a4af-c58ac4c014f2-serving-cert\") pod \"controller-manager-879f6c89f-dmhsg\" (UID: \"305d9e46-16ee-4444-a4af-c58ac4c014f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168273 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e21e239-6ef9-41e5-bc03-dbf7a441fda7-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-wrvwq\" (UID: \"6e21e239-6ef9-41e5-bc03-dbf7a441fda7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wrvwq" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168292 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ac0251e7-5184-49b4-84ae-ab058632718c-console-config\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " pod="openshift-console/console-f9d7485db-gjhwn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168308 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e21e239-6ef9-41e5-bc03-dbf7a441fda7-serving-cert\") pod \"authentication-operator-69f744f599-wrvwq\" (UID: \"6e21e239-6ef9-41e5-bc03-dbf7a441fda7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wrvwq" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168328 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ac0251e7-5184-49b4-84ae-ab058632718c-console-serving-cert\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " pod="openshift-console/console-f9d7485db-gjhwn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168346 4721 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/e7f127fb-2ee2-4c82-b5c7-1d59743c9141-machine-approver-tls\") pod \"machine-approver-56656f9798-262dd\" (UID: \"e7f127fb-2ee2-4c82-b5c7-1d59743c9141\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-262dd" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168366 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9xpx\" (UniqueName: \"kubernetes.io/projected/305d9e46-16ee-4444-a4af-c58ac4c014f2-kube-api-access-t9xpx\") pod \"controller-manager-879f6c89f-dmhsg\" (UID: \"305d9e46-16ee-4444-a4af-c58ac4c014f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168384 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lf7fz\" (UniqueName: \"kubernetes.io/projected/ac0251e7-5184-49b4-84ae-ab058632718c-kube-api-access-lf7fz\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " pod="openshift-console/console-f9d7485db-gjhwn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168401 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zbcd\" (UniqueName: \"kubernetes.io/projected/e7f127fb-2ee2-4c82-b5c7-1d59743c9141-kube-api-access-9zbcd\") pod \"machine-approver-56656f9798-262dd\" (UID: \"e7f127fb-2ee2-4c82-b5c7-1d59743c9141\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-262dd" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168418 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/305d9e46-16ee-4444-a4af-c58ac4c014f2-client-ca\") pod \"controller-manager-879f6c89f-dmhsg\" (UID: \"305d9e46-16ee-4444-a4af-c58ac4c014f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168435 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nc4r5\" (UniqueName: \"kubernetes.io/projected/6e21e239-6ef9-41e5-bc03-dbf7a441fda7-kube-api-access-nc4r5\") pod \"authentication-operator-69f744f599-wrvwq\" (UID: \"6e21e239-6ef9-41e5-bc03-dbf7a441fda7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wrvwq" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168466 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e21e239-6ef9-41e5-bc03-dbf7a441fda7-config\") pod \"authentication-operator-69f744f599-wrvwq\" (UID: \"6e21e239-6ef9-41e5-bc03-dbf7a441fda7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wrvwq" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168484 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ac0251e7-5184-49b4-84ae-ab058632718c-console-oauth-config\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " pod="openshift-console/console-f9d7485db-gjhwn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168500 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/786088ab-7a61-4923-9dbd-56ffac4d3230-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-rr4mf\" 
(UID: \"786088ab-7a61-4923-9dbd-56ffac4d3230\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rr4mf" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.168816 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-vk9wg"] Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.169984 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ac0251e7-5184-49b4-84ae-ab058632718c-oauth-serving-cert\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " pod="openshift-console/console-f9d7485db-gjhwn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.171018 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e7f127fb-2ee2-4c82-b5c7-1d59743c9141-auth-proxy-config\") pod \"machine-approver-56656f9798-262dd\" (UID: \"e7f127fb-2ee2-4c82-b5c7-1d59743c9141\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-262dd" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.172002 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7f127fb-2ee2-4c82-b5c7-1d59743c9141-config\") pod \"machine-approver-56656f9798-262dd\" (UID: \"e7f127fb-2ee2-4c82-b5c7-1d59743c9141\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-262dd" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.172588 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/786088ab-7a61-4923-9dbd-56ffac4d3230-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-rr4mf\" (UID: \"786088ab-7a61-4923-9dbd-56ffac4d3230\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rr4mf" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.173221 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/305d9e46-16ee-4444-a4af-c58ac4c014f2-client-ca\") pod \"controller-manager-879f6c89f-dmhsg\" (UID: \"305d9e46-16ee-4444-a4af-c58ac4c014f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.173460 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac0251e7-5184-49b4-84ae-ab058632718c-trusted-ca-bundle\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " pod="openshift-console/console-f9d7485db-gjhwn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.173871 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e21e239-6ef9-41e5-bc03-dbf7a441fda7-config\") pod \"authentication-operator-69f744f599-wrvwq\" (UID: \"6e21e239-6ef9-41e5-bc03-dbf7a441fda7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wrvwq" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.174813 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.175188 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/6e21e239-6ef9-41e5-bc03-dbf7a441fda7-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-wrvwq\" (UID: \"6e21e239-6ef9-41e5-bc03-dbf7a441fda7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wrvwq" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.175940 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ac0251e7-5184-49b4-84ae-ab058632718c-console-config\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " pod="openshift-console/console-f9d7485db-gjhwn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.176121 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc"] Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.177142 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/305d9e46-16ee-4444-a4af-c58ac4c014f2-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-dmhsg\" (UID: \"305d9e46-16ee-4444-a4af-c58ac4c014f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.177347 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/305d9e46-16ee-4444-a4af-c58ac4c014f2-config\") pod \"controller-manager-879f6c89f-dmhsg\" (UID: \"305d9e46-16ee-4444-a4af-c58ac4c014f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.177395 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e21e239-6ef9-41e5-bc03-dbf7a441fda7-serving-cert\") pod \"authentication-operator-69f744f599-wrvwq\" (UID: \"6e21e239-6ef9-41e5-bc03-dbf7a441fda7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wrvwq" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.177765 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vk9wg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.177969 4721 util.go:30] "No sandbox for pod can be found. 
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.178901 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ac0251e7-5184-49b4-84ae-ab058632718c-service-ca\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " pod="openshift-console/console-f9d7485db-gjhwn"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.178918 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e21e239-6ef9-41e5-bc03-dbf7a441fda7-service-ca-bundle\") pod \"authentication-operator-69f744f599-wrvwq\" (UID: \"6e21e239-6ef9-41e5-bc03-dbf7a441fda7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wrvwq"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.178965 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/786088ab-7a61-4923-9dbd-56ffac4d3230-config\") pod \"openshift-apiserver-operator-796bbdcf4f-rr4mf\" (UID: \"786088ab-7a61-4923-9dbd-56ffac4d3230\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rr4mf"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.179779 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-p8rcl"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.180088 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.180099 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ac0251e7-5184-49b4-84ae-ab058632718c-console-oauth-config\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " pod="openshift-console/console-f9d7485db-gjhwn"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.180530 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ac0251e7-5184-49b4-84ae-ab058632718c-console-serving-cert\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " pod="openshift-console/console-f9d7485db-gjhwn"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.180550 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5ffql"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.180634 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-p8rcl"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.180706 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/e7f127fb-2ee2-4c82-b5c7-1d59743c9141-machine-approver-tls\") pod \"machine-approver-56656f9798-262dd\" (UID: \"e7f127fb-2ee2-4c82-b5c7-1d59743c9141\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-262dd"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.181021 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.181518 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4n9g5"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.181826 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.181943 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5ffql"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.181996 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4n9g5"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.182522 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.184830 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.187387 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9m9ck"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.187405 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.188463 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-r9lmq"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.193738 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9m9ck"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.193741 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/305d9e46-16ee-4444-a4af-c58ac4c014f2-serving-cert\") pod \"controller-manager-879f6c89f-dmhsg\" (UID: \"305d9e46-16ee-4444-a4af-c58ac4c014f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.196541 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-pbtkt"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.197137 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-r9lmq"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.199581 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-z8qpj"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.200215 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pbtkt"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.202724 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-78vlb"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.203849 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-78vlb"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.204308 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-z8qpj"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.212737 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.215895 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.216835 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-zhzq5"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.217473 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-zhzq5"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.218513 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.218901 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-j4d5j"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.221046 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-wrvwq"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.222483 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-vvz5j"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.224479 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.227277 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-crzdf"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.227315 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzjfq"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.230673 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.230670 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.232165 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dmhsg"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.234046 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.235015 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-gjhwn"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.238865 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-fnzlv"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.239963 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4s85z"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.241364 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-76xjj"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.242778 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-76xjj"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.246192 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.246234 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5ffql"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.246247 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-26t29"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.248115 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.249498 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v8v5q"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.250964 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pgrjc"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.252119 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.252515 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-5w6rn"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.254057 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-5w6rn"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.254884 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-r9lmq"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.255840 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-78vlb"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.257273 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4n9g5"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.258837 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6lrzd"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.260463 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-tcjm6"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.261862 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-p8rcl"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.264579 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-pbtkt"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.265367 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.267205 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-jv2w5"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.268173 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rr4mf"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.269234 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.269765 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.270364 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-76xjj"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.271536 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9m9ck"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.272498 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.273738 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-vk9wg"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.274786 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-z8qpj"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.275813 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-zhzq5"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.276942 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-5w6rn"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.278928 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-pnmsb"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.280618 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-pnmsb"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.280736 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-pnmsb"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.283141 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-5fwvr"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.283679 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-5fwvr"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.294163 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.313605 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.347433 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.350808 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.370103 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.390951 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.410103 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.412691 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl"]
Nov 21 13:38:29 crc kubenswrapper[4721]: W1121 13:38:29.418804 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod180dc212_b361_48e7_ad3e_70328538c4c6.slice/crio-731151043c96423f0e1e1f9c61598998cfdb75ca3dce96bb70aaedc223c48364 WatchSource:0}: Error finding container 731151043c96423f0e1e1f9c61598998cfdb75ca3dce96bb70aaedc223c48364: Status 404 returned error can't find the container with id 731151043c96423f0e1e1f9c61598998cfdb75ca3dce96bb70aaedc223c48364
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.430020 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.457633 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.459444 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-xhzsg"]
Nov 21 13:38:29 crc kubenswrapper[4721]: W1121 13:38:29.465466 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3f75b560_06f9_4c2d_94f0_64ae3d351bfa.slice/crio-2be41b40e197a331027522c1b9f190deb46d4a4aa93633eb5122584ada20cff2 WatchSource:0}: Error finding container 2be41b40e197a331027522c1b9f190deb46d4a4aa93633eb5122584ada20cff2: Status 404 returned error can't find the container with id 2be41b40e197a331027522c1b9f190deb46d4a4aa93633eb5122584ada20cff2
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.470291 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.481051 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9"]
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.491316 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.510850 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.529753 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.549812 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.577711 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.610337 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.629039 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.651216 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.670253 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.673454 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b6ae0efb-0b79-4944-9dbb-44da8a370450-installation-pull-secrets\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.673516 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.673556 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/5fc0132e-382a-4fa5-b4a4-602d7cfa461f-etcd-ca\") pod \"etcd-operator-b45778765-j4d5j\" (UID: \"5fc0132e-382a-4fa5-b4a4-602d7cfa461f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.673595 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6ce2425-0836-4d18-95b7-cdf56c182b25-config\") pod \"machine-api-operator-5694c8668f-tcjm6\" (UID: \"a6ce2425-0836-4d18-95b7-cdf56c182b25\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tcjm6"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.673715 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/97b0bd4b-9511-4ee5-a6ba-d110efff4010-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-2p2bg\" (UID: \"97b0bd4b-9511-4ee5-a6ba-d110efff4010\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg"
\"kubernetes.io/projected/97b0bd4b-9511-4ee5-a6ba-d110efff4010-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-2p2bg\" (UID: \"97b0bd4b-9511-4ee5-a6ba-d110efff4010\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.673756 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.673785 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.673812 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/97b0bd4b-9511-4ee5-a6ba-d110efff4010-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-2p2bg\" (UID: \"97b0bd4b-9511-4ee5-a6ba-d110efff4010\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.673841 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.673874 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.673912 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/02155afe-72cf-495e-92e5-3375bf701452-available-featuregates\") pod \"openshift-config-operator-7777fb866f-nbq9v\" (UID: \"02155afe-72cf-495e-92e5-3375bf701452\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.673930 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 
13:38:29.674052 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5fc0132e-382a-4fa5-b4a4-602d7cfa461f-etcd-client\") pod \"etcd-operator-b45778765-j4d5j\" (UID: \"5fc0132e-382a-4fa5-b4a4-602d7cfa461f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.674169 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/97b0bd4b-9511-4ee5-a6ba-d110efff4010-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-2p2bg\" (UID: \"97b0bd4b-9511-4ee5-a6ba-d110efff4010\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.674234 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b6ae0efb-0b79-4944-9dbb-44da8a370450-registry-certificates\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.674510 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.674565 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b6ae0efb-0b79-4944-9dbb-44da8a370450-registry-tls\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.674592 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rs7nj\" (UniqueName: \"kubernetes.io/projected/5c3ec0e2-ee07-440c-a443-128e4c9b82e7-kube-api-access-rs7nj\") pod \"openshift-controller-manager-operator-756b6f6bc6-6lrzd\" (UID: \"5c3ec0e2-ee07-440c-a443-128e4c9b82e7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6lrzd" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.674623 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cg67d\" (UniqueName: \"kubernetes.io/projected/8b0cfc08-43fe-4026-b4d1-b3aae8c26fde-kube-api-access-cg67d\") pod \"downloads-7954f5f757-vvz5j\" (UID: \"8b0cfc08-43fe-4026-b4d1-b3aae8c26fde\") " pod="openshift-console/downloads-7954f5f757-vvz5j" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.674646 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/02155afe-72cf-495e-92e5-3375bf701452-serving-cert\") pod \"openshift-config-operator-7777fb866f-nbq9v\" (UID: \"02155afe-72cf-495e-92e5-3375bf701452\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v" 
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.674710 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b6ae0efb-0b79-4944-9dbb-44da8a370450-bound-sa-token\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.674785 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fc0132e-382a-4fa5-b4a4-602d7cfa461f-config\") pod \"etcd-operator-b45778765-j4d5j\" (UID: \"5fc0132e-382a-4fa5-b4a4-602d7cfa461f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.674813 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/022a1886-afb6-49ca-801b-2a57240223bf-audit-dir\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.674836 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c3ec0e2-ee07-440c-a443-128e4c9b82e7-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-6lrzd\" (UID: \"5c3ec0e2-ee07-440c-a443-128e4c9b82e7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6lrzd"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.674920 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9k7lw\" (UniqueName: \"kubernetes.io/projected/97b0bd4b-9511-4ee5-a6ba-d110efff4010-kube-api-access-9k7lw\") pod \"cluster-image-registry-operator-dc59b4c8b-2p2bg\" (UID: \"97b0bd4b-9511-4ee5-a6ba-d110efff4010\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.674949 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.674977 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b6ae0efb-0b79-4944-9dbb-44da8a370450-ca-trust-extracted\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.675010 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snrgs\" (UniqueName: \"kubernetes.io/projected/5fc0132e-382a-4fa5-b4a4-602d7cfa461f-kube-api-access-snrgs\") pod \"etcd-operator-b45778765-j4d5j\" (UID: \"5fc0132e-382a-4fa5-b4a4-602d7cfa461f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.675038 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttrp9\" (UniqueName: \"kubernetes.io/projected/022a1886-afb6-49ca-801b-2a57240223bf-kube-api-access-ttrp9\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.675199 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6ae0efb-0b79-4944-9dbb-44da8a370450-trusted-ca\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.675235 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.675263 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-audit-policies\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.675313 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.675361 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/a6ce2425-0836-4d18-95b7-cdf56c182b25-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-tcjm6\" (UID: \"a6ce2425-0836-4d18-95b7-cdf56c182b25\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tcjm6"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.675407 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5c3ec0e2-ee07-440c-a443-128e4c9b82e7-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-6lrzd\" (UID: \"5c3ec0e2-ee07-440c-a443-128e4c9b82e7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6lrzd"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.675430 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgkmg\" (UniqueName: \"kubernetes.io/projected/a6ce2425-0836-4d18-95b7-cdf56c182b25-kube-api-access-rgkmg\") pod \"machine-api-operator-5694c8668f-tcjm6\" (UID: \"a6ce2425-0836-4d18-95b7-cdf56c182b25\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tcjm6"
pod="openshift-machine-api/machine-api-operator-5694c8668f-tcjm6" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.675493 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a6ce2425-0836-4d18-95b7-cdf56c182b25-images\") pod \"machine-api-operator-5694c8668f-tcjm6\" (UID: \"a6ce2425-0836-4d18-95b7-cdf56c182b25\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tcjm6" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.675521 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5fc0132e-382a-4fa5-b4a4-602d7cfa461f-serving-cert\") pod \"etcd-operator-b45778765-j4d5j\" (UID: \"5fc0132e-382a-4fa5-b4a4-602d7cfa461f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.675542 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/5fc0132e-382a-4fa5-b4a4-602d7cfa461f-etcd-service-ca\") pod \"etcd-operator-b45778765-j4d5j\" (UID: \"5fc0132e-382a-4fa5-b4a4-602d7cfa461f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.675568 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.675594 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52krc\" (UniqueName: \"kubernetes.io/projected/b6ae0efb-0b79-4944-9dbb-44da8a370450-kube-api-access-52krc\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.675660 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bj4h\" (UniqueName: \"kubernetes.io/projected/02155afe-72cf-495e-92e5-3375bf701452-kube-api-access-9bj4h\") pod \"openshift-config-operator-7777fb866f-nbq9v\" (UID: \"02155afe-72cf-495e-92e5-3375bf701452\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.675684 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: E1121 13:38:29.677380 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:30.177308729 +0000 UTC m=+148.155200217 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.689769 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.709974 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.730118 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.751249 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.771280 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.776694 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:29 crc kubenswrapper[4721]: E1121 13:38:29.777042 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:30.276997312 +0000 UTC m=+148.254888600 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777132 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c3ec0e2-ee07-440c-a443-128e4c9b82e7-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-6lrzd\" (UID: \"5c3ec0e2-ee07-440c-a443-128e4c9b82e7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6lrzd"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777177 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/7cfe2940-b7fe-4556-882d-e413f1f2a270-stats-auth\") pod \"router-default-5444994796-rr29s\" (UID: \"7cfe2940-b7fe-4556-882d-e413f1f2a270\") " pod="openshift-ingress/router-default-5444994796-rr29s"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777199 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7cfe2940-b7fe-4556-882d-e413f1f2a270-metrics-certs\") pod \"router-default-5444994796-rr29s\" (UID: \"7cfe2940-b7fe-4556-882d-e413f1f2a270\") " pod="openshift-ingress/router-default-5444994796-rr29s"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777229 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t78gq\" (UniqueName: \"kubernetes.io/projected/ffcd67ab-3e09-4eea-9ba2-d1f4a210ccc6-kube-api-access-t78gq\") pod \"service-ca-operator-777779d784-z8qpj\" (UID: \"ffcd67ab-3e09-4eea-9ba2-d1f4a210ccc6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z8qpj"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777259 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b6ae0efb-0b79-4944-9dbb-44da8a370450-ca-trust-extracted\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777287 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snrgs\" (UniqueName: \"kubernetes.io/projected/5fc0132e-382a-4fa5-b4a4-602d7cfa461f-kube-api-access-snrgs\") pod \"etcd-operator-b45778765-j4d5j\" (UID: \"5fc0132e-382a-4fa5-b4a4-602d7cfa461f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777314 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8s4mc\" (UniqueName: \"kubernetes.io/projected/71fa613a-a051-44cb-9532-0bec74a4c46d-kube-api-access-8s4mc\") pod \"multus-admission-controller-857f4d67dd-p8rcl\" (UID: \"71fa613a-a051-44cb-9532-0bec74a4c46d\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p8rcl"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777351 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v58mf\" (UniqueName: \"kubernetes.io/projected/6a61c6a8-ff2f-47fd-aa02-04abddc2d607-kube-api-access-v58mf\") pod \"csi-hostpathplugin-5w6rn\" (UID: \"6a61c6a8-ff2f-47fd-aa02-04abddc2d607\") " pod="hostpath-provisioner/csi-hostpathplugin-5w6rn"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777374 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/71fa613a-a051-44cb-9532-0bec74a4c46d-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-p8rcl\" (UID: \"71fa613a-a051-44cb-9532-0bec74a4c46d\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p8rcl"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777396 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e4f1353-9937-44bf-99b7-7d9d29e7eb9f-config\") pod \"console-operator-58897d9998-26t29\" (UID: \"4e4f1353-9937-44bf-99b7-7d9d29e7eb9f\") " pod="openshift-console-operator/console-operator-58897d9998-26t29"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777421 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mz7k9\" (UniqueName: \"kubernetes.io/projected/3cc6633d-c47d-4179-89c9-8ffd18bd0707-kube-api-access-mz7k9\") pod \"packageserver-d55dfcdfc-pgjvs\" (UID: \"3cc6633d-c47d-4179-89c9-8ffd18bd0707\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777450 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/7cfe2940-b7fe-4556-882d-e413f1f2a270-default-certificate\") pod \"router-default-5444994796-rr29s\" (UID: \"7cfe2940-b7fe-4556-882d-e413f1f2a270\") " pod="openshift-ingress/router-default-5444994796-rr29s"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777471 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/23cff5ba-db17-46f6-bdcb-fb06aa282ca8-auth-proxy-config\") pod \"machine-config-operator-74547568cd-wdvzc\" (UID: \"23cff5ba-db17-46f6-bdcb-fb06aa282ca8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777498 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777521 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffcd67ab-3e09-4eea-9ba2-d1f4a210ccc6-config\") pod \"service-ca-operator-777779d784-z8qpj\" (UID: \"ffcd67ab-3e09-4eea-9ba2-d1f4a210ccc6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z8qpj"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777551 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/a6ce2425-0836-4d18-95b7-cdf56c182b25-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-tcjm6\" (UID: \"a6ce2425-0836-4d18-95b7-cdf56c182b25\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tcjm6"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777574 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/23cff5ba-db17-46f6-bdcb-fb06aa282ca8-images\") pod \"machine-config-operator-74547568cd-wdvzc\" (UID: \"23cff5ba-db17-46f6-bdcb-fb06aa282ca8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777617 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bj82b\" (UniqueName: \"kubernetes.io/projected/7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2-kube-api-access-bj82b\") pod \"olm-operator-6b444d44fb-qkfcx\" (UID: \"7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777642 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5c3ec0e2-ee07-440c-a443-128e4c9b82e7-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-6lrzd\" (UID: \"5c3ec0e2-ee07-440c-a443-128e4c9b82e7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6lrzd"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777667 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgkmg\" (UniqueName: \"kubernetes.io/projected/a6ce2425-0836-4d18-95b7-cdf56c182b25-kube-api-access-rgkmg\") pod \"machine-api-operator-5694c8668f-tcjm6\" (UID: \"a6ce2425-0836-4d18-95b7-cdf56c182b25\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tcjm6"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777692 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8a2271db-e669-4c34-94c0-35d7a3ec0468-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-zzjfq\" (UID: \"8a2271db-e669-4c34-94c0-35d7a3ec0468\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzjfq"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777730 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5fc0132e-382a-4fa5-b4a4-602d7cfa461f-serving-cert\") pod \"etcd-operator-b45778765-j4d5j\" (UID: \"5fc0132e-382a-4fa5-b4a4-602d7cfa461f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777754 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777780 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a6ce2425-0836-4d18-95b7-cdf56c182b25-images\") pod \"machine-api-operator-5694c8668f-tcjm6\" (UID: \"a6ce2425-0836-4d18-95b7-cdf56c182b25\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tcjm6"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777808 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52krc\" (UniqueName: \"kubernetes.io/projected/b6ae0efb-0b79-4944-9dbb-44da8a370450-kube-api-access-52krc\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777837 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/6a61c6a8-ff2f-47fd-aa02-04abddc2d607-registration-dir\") pod \"csi-hostpathplugin-5w6rn\" (UID: \"6a61c6a8-ff2f-47fd-aa02-04abddc2d607\") " pod="hostpath-provisioner/csi-hostpathplugin-5w6rn"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777863 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6l8mw\" (UniqueName: \"kubernetes.io/projected/eecc36cc-4fa2-4d1d-99d5-fe4e5fb890c7-kube-api-access-6l8mw\") pod \"dns-operator-744455d44c-jv2w5\" (UID: \"eecc36cc-4fa2-4d1d-99d5-fe4e5fb890c7\") " pod="openshift-dns-operator/dns-operator-744455d44c-jv2w5"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777885 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2-profile-collector-cert\") pod \"olm-operator-6b444d44fb-qkfcx\" (UID: \"7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777909 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6566be5a-1fb9-4941-bd16-8c7a122debff-signing-cabundle\") pod \"service-ca-9c57cc56f-zhzq5\" (UID: \"6566be5a-1fb9-4941-bd16-8c7a122debff\") " pod="openshift-service-ca/service-ca-9c57cc56f-zhzq5"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777935 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bj4h\" (UniqueName: \"kubernetes.io/projected/02155afe-72cf-495e-92e5-3375bf701452-kube-api-access-9bj4h\") pod \"openshift-config-operator-7777fb866f-nbq9v\" (UID: \"02155afe-72cf-495e-92e5-3375bf701452\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777945 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b6ae0efb-0b79-4944-9dbb-44da8a370450-ca-trust-extracted\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.777960 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8bd725df-b6b1-4de7-a77a-917a4f592b92-config-volume\") pod \"collect-profiles-29395530-fd2zs\" (UID: \"8bd725df-b6b1-4de7-a77a-917a4f592b92\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs"
\"collect-profiles-29395530-fd2zs\" (UID: \"8bd725df-b6b1-4de7-a77a-917a4f592b92\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778097 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/6a61c6a8-ff2f-47fd-aa02-04abddc2d607-plugins-dir\") pod \"csi-hostpathplugin-5w6rn\" (UID: \"6a61c6a8-ff2f-47fd-aa02-04abddc2d607\") " pod="hostpath-provisioner/csi-hostpathplugin-5w6rn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778135 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f1072b74-e682-4755-ae65-af3f13ab5fe2-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jrq68\" (UID: \"f1072b74-e682-4755-ae65-af3f13ab5fe2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778179 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b6ae0efb-0b79-4944-9dbb-44da8a370450-installation-pull-secrets\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778217 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/5fc0132e-382a-4fa5-b4a4-602d7cfa461f-etcd-ca\") pod \"etcd-operator-b45778765-j4d5j\" (UID: \"5fc0132e-382a-4fa5-b4a4-602d7cfa461f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778258 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/db1c61b3-a406-4b4c-970e-fa47d27a966a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-pgrjc\" (UID: \"db1c61b3-a406-4b4c-970e-fa47d27a966a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pgrjc" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778306 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6nv7\" (UniqueName: \"kubernetes.io/projected/db1c61b3-a406-4b4c-970e-fa47d27a966a-kube-api-access-j6nv7\") pod \"cluster-samples-operator-665b6dd947-pgrjc\" (UID: \"db1c61b3-a406-4b4c-970e-fa47d27a966a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pgrjc" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778352 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3ed2815-f151-455e-bbe5-aa4f3c23cf7d-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-4s85z\" (UID: \"f3ed2815-f151-455e-bbe5-aa4f3c23cf7d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4s85z" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778407 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/6a61c6a8-ff2f-47fd-aa02-04abddc2d607-socket-dir\") pod \"csi-hostpathplugin-5w6rn\" (UID: 
\"6a61c6a8-ff2f-47fd-aa02-04abddc2d607\") " pod="hostpath-provisioner/csi-hostpathplugin-5w6rn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778447 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/457ed397-53fe-415b-8d9c-855986ed1108-config-volume\") pod \"dns-default-pnmsb\" (UID: \"457ed397-53fe-415b-8d9c-855986ed1108\") " pod="openshift-dns/dns-default-pnmsb" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778486 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/97b0bd4b-9511-4ee5-a6ba-d110efff4010-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-2p2bg\" (UID: \"97b0bd4b-9511-4ee5-a6ba-d110efff4010\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778525 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778560 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkbll\" (UniqueName: \"kubernetes.io/projected/1d35c914-e539-4c91-87b0-df193d6ba649-kube-api-access-dkbll\") pod \"machine-config-controller-84d6567774-vk9wg\" (UID: \"1d35c914-e539-4c91-87b0-df193d6ba649\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vk9wg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778600 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/36924117-0998-45ef-8887-4a8aa7f5c392-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-r9lmq\" (UID: \"36924117-0998-45ef-8887-4a8aa7f5c392\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-r9lmq" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778637 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7cfe2940-b7fe-4556-882d-e413f1f2a270-service-ca-bundle\") pod \"router-default-5444994796-rr29s\" (UID: \"7cfe2940-b7fe-4556-882d-e413f1f2a270\") " pod="openshift-ingress/router-default-5444994796-rr29s" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778671 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778735 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778768 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778804 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/407861d7-5569-41aa-bcca-0250e213d7c4-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-4n9g5\" (UID: \"407861d7-5569-41aa-bcca-0250e213d7c4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4n9g5" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778844 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/eecc36cc-4fa2-4d1d-99d5-fe4e5fb890c7-metrics-tls\") pod \"dns-operator-744455d44c-jv2w5\" (UID: \"eecc36cc-4fa2-4d1d-99d5-fe4e5fb890c7\") " pod="openshift-dns-operator/dns-operator-744455d44c-jv2w5" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778876 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/06c8c57f-dd7e-4b21-a27d-b2679f84e0b0-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-v8v5q\" (UID: \"06c8c57f-dd7e-4b21-a27d-b2679f84e0b0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v8v5q" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778912 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f3ed2815-f151-455e-bbe5-aa4f3c23cf7d-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-4s85z\" (UID: \"f3ed2815-f151-455e-bbe5-aa4f3c23cf7d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4s85z" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778947 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8bd725df-b6b1-4de7-a77a-917a4f592b92-secret-volume\") pod \"collect-profiles-29395530-fd2zs\" (UID: \"8bd725df-b6b1-4de7-a77a-917a4f592b92\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.778978 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/6a61c6a8-ff2f-47fd-aa02-04abddc2d607-csi-data-dir\") pod \"csi-hostpathplugin-5w6rn\" (UID: \"6a61c6a8-ff2f-47fd-aa02-04abddc2d607\") " pod="hostpath-provisioner/csi-hostpathplugin-5w6rn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.779010 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/cfdebc10-56d8-422a-9905-b601c5e52a3e-profile-collector-cert\") pod \"catalog-operator-68c6474976-9m9ck\" (UID: \"cfdebc10-56d8-422a-9905-b601c5e52a3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9m9ck" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.779046 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhgzp\" (UniqueName: \"kubernetes.io/projected/f5ce7765-3021-4ace-89f4-1b78230d9def-kube-api-access-bhgzp\") pod \"machine-config-server-5fwvr\" (UID: \"f5ce7765-3021-4ace-89f4-1b78230d9def\") " pod="openshift-machine-config-operator/machine-config-server-5fwvr" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.779099 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c3ec0e2-ee07-440c-a443-128e4c9b82e7-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-6lrzd\" (UID: \"5c3ec0e2-ee07-440c-a443-128e4c9b82e7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6lrzd" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.780293 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/5fc0132e-382a-4fa5-b4a4-602d7cfa461f-etcd-ca\") pod \"etcd-operator-b45778765-j4d5j\" (UID: \"5fc0132e-382a-4fa5-b4a4-602d7cfa461f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j" Nov 21 13:38:29 crc kubenswrapper[4721]: E1121 13:38:29.780707 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:30.280670967 +0000 UTC m=+148.258562255 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.779114 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.781787 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/682dcfaa-ba18-421a-bbd2-4c29d22e2373-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-78vlb\" (UID: \"682dcfaa-ba18-421a-bbd2-4c29d22e2373\") " pod="openshift-marketplace/marketplace-operator-79b997595-78vlb"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.781849 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1d35c914-e539-4c91-87b0-df193d6ba649-proxy-tls\") pod \"machine-config-controller-84d6567774-vk9wg\" (UID: \"1d35c914-e539-4c91-87b0-df193d6ba649\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vk9wg"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.781891 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rs7nj\" (UniqueName: \"kubernetes.io/projected/5c3ec0e2-ee07-440c-a443-128e4c9b82e7-kube-api-access-rs7nj\") pod \"openshift-controller-manager-operator-756b6f6bc6-6lrzd\" (UID: \"5c3ec0e2-ee07-440c-a443-128e4c9b82e7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6lrzd"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.781915 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cg67d\" (UniqueName: \"kubernetes.io/projected/8b0cfc08-43fe-4026-b4d1-b3aae8c26fde-kube-api-access-cg67d\") pod \"downloads-7954f5f757-vvz5j\" (UID: \"8b0cfc08-43fe-4026-b4d1-b3aae8c26fde\") " pod="openshift-console/downloads-7954f5f757-vvz5j"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.781946 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/6a61c6a8-ff2f-47fd-aa02-04abddc2d607-mountpoint-dir\") pod \"csi-hostpathplugin-5w6rn\" (UID: \"6a61c6a8-ff2f-47fd-aa02-04abddc2d607\") " pod="hostpath-provisioner/csi-hostpathplugin-5w6rn"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.781986 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b6ae0efb-0b79-4944-9dbb-44da8a370450-bound-sa-token\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.782003 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzxjz\" (UniqueName: \"kubernetes.io/projected/682dcfaa-ba18-421a-bbd2-4c29d22e2373-kube-api-access-nzxjz\") pod \"marketplace-operator-79b997595-78vlb\" (UID: \"682dcfaa-ba18-421a-bbd2-4c29d22e2373\") " pod="openshift-marketplace/marketplace-operator-79b997595-78vlb"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.782030 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fc0132e-382a-4fa5-b4a4-602d7cfa461f-config\") pod \"etcd-operator-b45778765-j4d5j\" (UID: \"5fc0132e-382a-4fa5-b4a4-602d7cfa461f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.782087 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9k7lw\" (UniqueName: \"kubernetes.io/projected/97b0bd4b-9511-4ee5-a6ba-d110efff4010-kube-api-access-9k7lw\") pod \"cluster-image-registry-operator-dc59b4c8b-2p2bg\" (UID: \"97b0bd4b-9511-4ee5-a6ba-d110efff4010\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.782108 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.782127 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f3ed2815-f151-455e-bbe5-aa4f3c23cf7d-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-4s85z\" (UID: \"f3ed2815-f151-455e-bbe5-aa4f3c23cf7d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4s85z"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.782145 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2-srv-cert\") pod \"olm-operator-6b444d44fb-qkfcx\" (UID: \"7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.782166 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tn8wz\" (UniqueName: \"kubernetes.io/projected/8bd725df-b6b1-4de7-a77a-917a4f592b92-kube-api-access-tn8wz\") pod \"collect-profiles-29395530-fd2zs\" (UID: \"8bd725df-b6b1-4de7-a77a-917a4f592b92\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.782193 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q466w\" (UniqueName: \"kubernetes.io/projected/457ed397-53fe-415b-8d9c-855986ed1108-kube-api-access-q466w\") pod \"dns-default-pnmsb\" (UID: \"457ed397-53fe-415b-8d9c-855986ed1108\") " pod="openshift-dns/dns-default-pnmsb"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.782217 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttrp9\" (UniqueName: \"kubernetes.io/projected/022a1886-afb6-49ca-801b-2a57240223bf-kube-api-access-ttrp9\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.782392 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ll5x7\" (UniqueName: \"kubernetes.io/projected/cfdebc10-56d8-422a-9905-b601c5e52a3e-kube-api-access-ll5x7\") pod \"catalog-operator-68c6474976-9m9ck\" (UID: \"cfdebc10-56d8-422a-9905-b601c5e52a3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9m9ck"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.782419 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6ae0efb-0b79-4944-9dbb-44da8a370450-trusted-ca\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.782439 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.782459 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a2271db-e669-4c34-94c0-35d7a3ec0468-config\") pod \"kube-controller-manager-operator-78b949d7b-zzjfq\" (UID: \"8a2271db-e669-4c34-94c0-35d7a3ec0468\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzjfq"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.782485 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-audit-policies\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.782508 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6bts\" (UniqueName: \"kubernetes.io/projected/70faefe3-8edc-4aab-8948-0478073732ae-kube-api-access-h6bts\") pod \"kube-storage-version-migrator-operator-b67b599dd-5ffql\" (UID: \"70faefe3-8edc-4aab-8948-0478073732ae\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5ffql"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.782533 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3cc6633d-c47d-4179-89c9-8ffd18bd0707-webhook-cert\") pod \"packageserver-d55dfcdfc-pgjvs\" (UID: \"3cc6633d-c47d-4179-89c9-8ffd18bd0707\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.782578 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbxwc\" (UniqueName: \"kubernetes.io/projected/f1072b74-e682-4755-ae65-af3f13ab5fe2-kube-api-access-rbxwc\") pod \"ingress-operator-5b745b69d9-jrq68\" (UID: \"f1072b74-e682-4755-ae65-af3f13ab5fe2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.782614 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f1072b74-e682-4755-ae65-af3f13ab5fe2-metrics-tls\") pod \"ingress-operator-5b745b69d9-jrq68\" (UID: \"f1072b74-e682-4755-ae65-af3f13ab5fe2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.783202 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a6ce2425-0836-4d18-95b7-cdf56c182b25-images\") pod \"machine-api-operator-5694c8668f-tcjm6\" (UID: \"a6ce2425-0836-4d18-95b7-cdf56c182b25\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tcjm6"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.783571 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/5fc0132e-382a-4fa5-b4a4-602d7cfa461f-etcd-service-ca\") pod \"etcd-operator-b45778765-j4d5j\" (UID: \"5fc0132e-382a-4fa5-b4a4-602d7cfa461f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.783638 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fc0132e-382a-4fa5-b4a4-602d7cfa461f-config\") pod \"etcd-operator-b45778765-j4d5j\" (UID: \"5fc0132e-382a-4fa5-b4a4-602d7cfa461f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.783726 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/682dcfaa-ba18-421a-bbd2-4c29d22e2373-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-78vlb\" (UID: \"682dcfaa-ba18-421a-bbd2-4c29d22e2373\") " pod="openshift-marketplace/marketplace-operator-79b997595-78vlb"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.783924 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/70faefe3-8edc-4aab-8948-0478073732ae-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-5ffql\" (UID: \"70faefe3-8edc-4aab-8948-0478073732ae\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5ffql"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.783957 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.783989 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv"
\"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.784017 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/23cff5ba-db17-46f6-bdcb-fb06aa282ca8-proxy-tls\") pod \"machine-config-operator-74547568cd-wdvzc\" (UID: \"23cff5ba-db17-46f6-bdcb-fb06aa282ca8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.784054 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6gdj\" (UniqueName: \"kubernetes.io/projected/407861d7-5569-41aa-bcca-0250e213d7c4-kube-api-access-f6gdj\") pod \"control-plane-machine-set-operator-78cbb6b69f-4n9g5\" (UID: \"407861d7-5569-41aa-bcca-0250e213d7c4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4n9g5" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.784090 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qhs9\" (UniqueName: \"kubernetes.io/projected/23cff5ba-db17-46f6-bdcb-fb06aa282ca8-kube-api-access-9qhs9\") pod \"machine-config-operator-74547568cd-wdvzc\" (UID: \"23cff5ba-db17-46f6-bdcb-fb06aa282ca8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.784168 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/06c8c57f-dd7e-4b21-a27d-b2679f84e0b0-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-v8v5q\" (UID: \"06c8c57f-dd7e-4b21-a27d-b2679f84e0b0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v8v5q" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.784201 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.784223 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6ce2425-0836-4d18-95b7-cdf56c182b25-config\") pod \"machine-api-operator-5694c8668f-tcjm6\" (UID: \"a6ce2425-0836-4d18-95b7-cdf56c182b25\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tcjm6" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.784251 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cd477a26-ea1b-4159-81c5-e861d8541643-cert\") pod \"ingress-canary-76xjj\" (UID: \"cd477a26-ea1b-4159-81c5-e861d8541643\") " pod="openshift-ingress-canary/ingress-canary-76xjj" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.784271 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: 
\"kubernetes.io/empty-dir/3cc6633d-c47d-4179-89c9-8ffd18bd0707-tmpfs\") pod \"packageserver-d55dfcdfc-pgjvs\" (UID: \"3cc6633d-c47d-4179-89c9-8ffd18bd0707\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.784291 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/97b0bd4b-9511-4ee5-a6ba-d110efff4010-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-2p2bg\" (UID: \"97b0bd4b-9511-4ee5-a6ba-d110efff4010\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.784609 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.784674 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/5fc0132e-382a-4fa5-b4a4-602d7cfa461f-etcd-service-ca\") pod \"etcd-operator-b45778765-j4d5j\" (UID: \"5fc0132e-382a-4fa5-b4a4-602d7cfa461f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.785109 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-audit-policies\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.786253 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.787393 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6ce2425-0836-4d18-95b7-cdf56c182b25-config\") pod \"machine-api-operator-5694c8668f-tcjm6\" (UID: \"a6ce2425-0836-4d18-95b7-cdf56c182b25\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tcjm6" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.787552 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/97b0bd4b-9511-4ee5-a6ba-d110efff4010-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-2p2bg\" (UID: \"97b0bd4b-9511-4ee5-a6ba-d110efff4010\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.787591 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/02155afe-72cf-495e-92e5-3375bf701452-available-featuregates\") pod \"openshift-config-operator-7777fb866f-nbq9v\" (UID: \"02155afe-72cf-495e-92e5-3375bf701452\") " 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.787612 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3cc6633d-c47d-4179-89c9-8ffd18bd0707-apiservice-cert\") pod \"packageserver-d55dfcdfc-pgjvs\" (UID: \"3cc6633d-c47d-4179-89c9-8ffd18bd0707\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.787640 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.787847 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70faefe3-8edc-4aab-8948-0478073732ae-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-5ffql\" (UID: \"70faefe3-8edc-4aab-8948-0478073732ae\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5ffql" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.787968 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wkg5\" (UniqueName: \"kubernetes.io/projected/c6fe4d9d-7eef-4287-b166-43800e03e8de-kube-api-access-5wkg5\") pod \"migrator-59844c95c7-pbtkt\" (UID: \"c6fe4d9d-7eef-4287-b166-43800e03e8de\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pbtkt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.788016 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/02155afe-72cf-495e-92e5-3375bf701452-available-featuregates\") pod \"openshift-config-operator-7777fb866f-nbq9v\" (UID: \"02155afe-72cf-495e-92e5-3375bf701452\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.788045 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ft7d\" (UniqueName: \"kubernetes.io/projected/4e4f1353-9937-44bf-99b7-7d9d29e7eb9f-kube-api-access-2ft7d\") pod \"console-operator-58897d9998-26t29\" (UID: \"4e4f1353-9937-44bf-99b7-7d9d29e7eb9f\") " pod="openshift-console-operator/console-operator-58897d9998-26t29" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.788102 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.788164 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: 
\"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.788185 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1d35c914-e539-4c91-87b0-df193d6ba649-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-vk9wg\" (UID: \"1d35c914-e539-4c91-87b0-df193d6ba649\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vk9wg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.789347 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.789831 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.789876 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.788441 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ffcd67ab-3e09-4eea-9ba2-d1f4a210ccc6-serving-cert\") pod \"service-ca-operator-777779d784-z8qpj\" (UID: \"ffcd67ab-3e09-4eea-9ba2-d1f4a210ccc6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z8qpj" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.789978 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/f5ce7765-3021-4ace-89f4-1b78230d9def-node-bootstrap-token\") pod \"machine-config-server-5fwvr\" (UID: \"f5ce7765-3021-4ace-89f4-1b78230d9def\") " pod="openshift-machine-config-operator/machine-config-server-5fwvr" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790121 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6566be5a-1fb9-4941-bd16-8c7a122debff-signing-key\") pod \"service-ca-9c57cc56f-zhzq5\" (UID: \"6566be5a-1fb9-4941-bd16-8c7a122debff\") " pod="openshift-service-ca/service-ca-9c57cc56f-zhzq5" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790163 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6npwq\" (UniqueName: \"kubernetes.io/projected/6566be5a-1fb9-4941-bd16-8c7a122debff-kube-api-access-6npwq\") pod \"service-ca-9c57cc56f-zhzq5\" (UID: \"6566be5a-1fb9-4941-bd16-8c7a122debff\") " 
pod="openshift-service-ca/service-ca-9c57cc56f-zhzq5" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790210 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5fc0132e-382a-4fa5-b4a4-602d7cfa461f-etcd-client\") pod \"etcd-operator-b45778765-j4d5j\" (UID: \"5fc0132e-382a-4fa5-b4a4-602d7cfa461f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790258 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/97b0bd4b-9511-4ee5-a6ba-d110efff4010-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-2p2bg\" (UID: \"97b0bd4b-9511-4ee5-a6ba-d110efff4010\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790300 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b6ae0efb-0b79-4944-9dbb-44da8a370450-registry-certificates\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790336 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a2271db-e669-4c34-94c0-35d7a3ec0468-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-zzjfq\" (UID: \"8a2271db-e669-4c34-94c0-35d7a3ec0468\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzjfq" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790371 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06c8c57f-dd7e-4b21-a27d-b2679f84e0b0-config\") pod \"kube-apiserver-operator-766d6c64bb-v8v5q\" (UID: \"06c8c57f-dd7e-4b21-a27d-b2679f84e0b0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v8v5q" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790413 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b6ae0efb-0b79-4944-9dbb-44da8a370450-registry-tls\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790447 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/457ed397-53fe-415b-8d9c-855986ed1108-metrics-tls\") pod \"dns-default-pnmsb\" (UID: \"457ed397-53fe-415b-8d9c-855986ed1108\") " pod="openshift-dns/dns-default-pnmsb" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790501 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/cfdebc10-56d8-422a-9905-b601c5e52a3e-srv-cert\") pod \"catalog-operator-68c6474976-9m9ck\" (UID: \"cfdebc10-56d8-422a-9905-b601c5e52a3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9m9ck" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790537 4721 
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790573 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e4f1353-9937-44bf-99b7-7d9d29e7eb9f-serving-cert\") pod \"console-operator-58897d9998-26t29\" (UID: \"4e4f1353-9937-44bf-99b7-7d9d29e7eb9f\") " pod="openshift-console-operator/console-operator-58897d9998-26t29"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790615 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/02155afe-72cf-495e-92e5-3375bf701452-serving-cert\") pod \"openshift-config-operator-7777fb866f-nbq9v\" (UID: \"02155afe-72cf-495e-92e5-3375bf701452\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790652 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqlqn\" (UniqueName: \"kubernetes.io/projected/36924117-0998-45ef-8887-4a8aa7f5c392-kube-api-access-hqlqn\") pod \"package-server-manager-789f6589d5-r9lmq\" (UID: \"36924117-0998-45ef-8887-4a8aa7f5c392\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-r9lmq"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790705 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f1072b74-e682-4755-ae65-af3f13ab5fe2-trusted-ca\") pod \"ingress-operator-5b745b69d9-jrq68\" (UID: \"f1072b74-e682-4755-ae65-af3f13ab5fe2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790737 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4e4f1353-9937-44bf-99b7-7d9d29e7eb9f-trusted-ca\") pod \"console-operator-58897d9998-26t29\" (UID: \"4e4f1353-9937-44bf-99b7-7d9d29e7eb9f\") " pod="openshift-console-operator/console-operator-58897d9998-26t29"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790777 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/022a1886-afb6-49ca-801b-2a57240223bf-audit-dir\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790813 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vl8w\" (UniqueName: \"kubernetes.io/projected/7cfe2940-b7fe-4556-882d-e413f1f2a270-kube-api-access-9vl8w\") pod \"router-default-5444994796-rr29s\" (UID: \"7cfe2940-b7fe-4556-882d-e413f1f2a270\") " pod="openshift-ingress/router-default-5444994796-rr29s"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790847 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/f5ce7765-3021-4ace-89f4-1b78230d9def-certs\") pod \"machine-config-server-5fwvr\" (UID: \"f5ce7765-3021-4ace-89f4-1b78230d9def\") " pod="openshift-machine-config-operator/machine-config-server-5fwvr"
\"kubernetes.io/secret/f5ce7765-3021-4ace-89f4-1b78230d9def-certs\") pod \"machine-config-server-5fwvr\" (UID: \"f5ce7765-3021-4ace-89f4-1b78230d9def\") " pod="openshift-machine-config-operator/machine-config-server-5fwvr" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790894 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.790899 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6ae0efb-0b79-4944-9dbb-44da8a370450-trusted-ca\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.791321 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.791498 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/022a1886-afb6-49ca-801b-2a57240223bf-audit-dir\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.791630 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.792798 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/a6ce2425-0836-4d18-95b7-cdf56c182b25-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-tcjm6\" (UID: \"a6ce2425-0836-4d18-95b7-cdf56c182b25\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tcjm6" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.793135 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b6ae0efb-0b79-4944-9dbb-44da8a370450-registry-certificates\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.794585 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5fc0132e-382a-4fa5-b4a4-602d7cfa461f-serving-cert\") pod \"etcd-operator-b45778765-j4d5j\" (UID: \"5fc0132e-382a-4fa5-b4a4-602d7cfa461f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.794740 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/5c3ec0e2-ee07-440c-a443-128e4c9b82e7-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-6lrzd\" (UID: \"5c3ec0e2-ee07-440c-a443-128e4c9b82e7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6lrzd" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.794874 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.795335 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b6ae0efb-0b79-4944-9dbb-44da8a370450-installation-pull-secrets\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.795612 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/02155afe-72cf-495e-92e5-3375bf701452-serving-cert\") pod \"openshift-config-operator-7777fb866f-nbq9v\" (UID: \"02155afe-72cf-495e-92e5-3375bf701452\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.796825 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/97b0bd4b-9511-4ee5-a6ba-d110efff4010-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-2p2bg\" (UID: \"97b0bd4b-9511-4ee5-a6ba-d110efff4010\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.797740 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5fc0132e-382a-4fa5-b4a4-602d7cfa461f-etcd-client\") pod \"etcd-operator-b45778765-j4d5j\" (UID: \"5fc0132e-382a-4fa5-b4a4-602d7cfa461f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.797802 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b6ae0efb-0b79-4944-9dbb-44da8a370450-registry-tls\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.810461 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.829864 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.850040 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.872018 4721 reflector.go:368] Caches 
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.889421 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.892237 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:29 crc kubenswrapper[4721]: E1121 13:38:29.892521 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:30.3924688 +0000 UTC m=+148.370360058 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.892628 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6bts\" (UniqueName: \"kubernetes.io/projected/70faefe3-8edc-4aab-8948-0478073732ae-kube-api-access-h6bts\") pod \"kube-storage-version-migrator-operator-b67b599dd-5ffql\" (UID: \"70faefe3-8edc-4aab-8948-0478073732ae\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5ffql"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.892690 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3cc6633d-c47d-4179-89c9-8ffd18bd0707-webhook-cert\") pod \"packageserver-d55dfcdfc-pgjvs\" (UID: \"3cc6633d-c47d-4179-89c9-8ffd18bd0707\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.892741 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbxwc\" (UniqueName: \"kubernetes.io/projected/f1072b74-e682-4755-ae65-af3f13ab5fe2-kube-api-access-rbxwc\") pod \"ingress-operator-5b745b69d9-jrq68\" (UID: \"f1072b74-e682-4755-ae65-af3f13ab5fe2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.892774 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f1072b74-e682-4755-ae65-af3f13ab5fe2-metrics-tls\") pod \"ingress-operator-5b745b69d9-jrq68\" (UID: \"f1072b74-e682-4755-ae65-af3f13ab5fe2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.892834 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/682dcfaa-ba18-421a-bbd2-4c29d22e2373-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-78vlb\" (UID: \"682dcfaa-ba18-421a-bbd2-4c29d22e2373\") " pod="openshift-marketplace/marketplace-operator-79b997595-78vlb"
\"kubernetes.io/secret/682dcfaa-ba18-421a-bbd2-4c29d22e2373-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-78vlb\" (UID: \"682dcfaa-ba18-421a-bbd2-4c29d22e2373\") " pod="openshift-marketplace/marketplace-operator-79b997595-78vlb" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.892866 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/70faefe3-8edc-4aab-8948-0478073732ae-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-5ffql\" (UID: \"70faefe3-8edc-4aab-8948-0478073732ae\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5ffql" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.892900 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/23cff5ba-db17-46f6-bdcb-fb06aa282ca8-proxy-tls\") pod \"machine-config-operator-74547568cd-wdvzc\" (UID: \"23cff5ba-db17-46f6-bdcb-fb06aa282ca8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.892936 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6gdj\" (UniqueName: \"kubernetes.io/projected/407861d7-5569-41aa-bcca-0250e213d7c4-kube-api-access-f6gdj\") pod \"control-plane-machine-set-operator-78cbb6b69f-4n9g5\" (UID: \"407861d7-5569-41aa-bcca-0250e213d7c4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4n9g5" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.892971 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qhs9\" (UniqueName: \"kubernetes.io/projected/23cff5ba-db17-46f6-bdcb-fb06aa282ca8-kube-api-access-9qhs9\") pod \"machine-config-operator-74547568cd-wdvzc\" (UID: \"23cff5ba-db17-46f6-bdcb-fb06aa282ca8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.892998 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/06c8c57f-dd7e-4b21-a27d-b2679f84e0b0-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-v8v5q\" (UID: \"06c8c57f-dd7e-4b21-a27d-b2679f84e0b0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v8v5q" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893035 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/3cc6633d-c47d-4179-89c9-8ffd18bd0707-tmpfs\") pod \"packageserver-d55dfcdfc-pgjvs\" (UID: \"3cc6633d-c47d-4179-89c9-8ffd18bd0707\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893090 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cd477a26-ea1b-4159-81c5-e861d8541643-cert\") pod \"ingress-canary-76xjj\" (UID: \"cd477a26-ea1b-4159-81c5-e861d8541643\") " pod="openshift-ingress-canary/ingress-canary-76xjj" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893126 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3cc6633d-c47d-4179-89c9-8ffd18bd0707-apiservice-cert\") pod \"packageserver-d55dfcdfc-pgjvs\" 
(UID: \"3cc6633d-c47d-4179-89c9-8ffd18bd0707\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893162 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70faefe3-8edc-4aab-8948-0478073732ae-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-5ffql\" (UID: \"70faefe3-8edc-4aab-8948-0478073732ae\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5ffql" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893193 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1d35c914-e539-4c91-87b0-df193d6ba649-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-vk9wg\" (UID: \"1d35c914-e539-4c91-87b0-df193d6ba649\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vk9wg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893217 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wkg5\" (UniqueName: \"kubernetes.io/projected/c6fe4d9d-7eef-4287-b166-43800e03e8de-kube-api-access-5wkg5\") pod \"migrator-59844c95c7-pbtkt\" (UID: \"c6fe4d9d-7eef-4287-b166-43800e03e8de\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pbtkt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893241 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ft7d\" (UniqueName: \"kubernetes.io/projected/4e4f1353-9937-44bf-99b7-7d9d29e7eb9f-kube-api-access-2ft7d\") pod \"console-operator-58897d9998-26t29\" (UID: \"4e4f1353-9937-44bf-99b7-7d9d29e7eb9f\") " pod="openshift-console-operator/console-operator-58897d9998-26t29" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893275 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ffcd67ab-3e09-4eea-9ba2-d1f4a210ccc6-serving-cert\") pod \"service-ca-operator-777779d784-z8qpj\" (UID: \"ffcd67ab-3e09-4eea-9ba2-d1f4a210ccc6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z8qpj" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893330 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/f5ce7765-3021-4ace-89f4-1b78230d9def-node-bootstrap-token\") pod \"machine-config-server-5fwvr\" (UID: \"f5ce7765-3021-4ace-89f4-1b78230d9def\") " pod="openshift-machine-config-operator/machine-config-server-5fwvr" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893355 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6566be5a-1fb9-4941-bd16-8c7a122debff-signing-key\") pod \"service-ca-9c57cc56f-zhzq5\" (UID: \"6566be5a-1fb9-4941-bd16-8c7a122debff\") " pod="openshift-service-ca/service-ca-9c57cc56f-zhzq5" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893378 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6npwq\" (UniqueName: \"kubernetes.io/projected/6566be5a-1fb9-4941-bd16-8c7a122debff-kube-api-access-6npwq\") pod \"service-ca-9c57cc56f-zhzq5\" (UID: \"6566be5a-1fb9-4941-bd16-8c7a122debff\") " pod="openshift-service-ca/service-ca-9c57cc56f-zhzq5" Nov 21 13:38:29 crc 
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893437 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a2271db-e669-4c34-94c0-35d7a3ec0468-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-zzjfq\" (UID: \"8a2271db-e669-4c34-94c0-35d7a3ec0468\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzjfq"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893461 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvs95\" (UniqueName: \"kubernetes.io/projected/cd477a26-ea1b-4159-81c5-e861d8541643-kube-api-access-rvs95\") pod \"ingress-canary-76xjj\" (UID: \"cd477a26-ea1b-4159-81c5-e861d8541643\") " pod="openshift-ingress-canary/ingress-canary-76xjj"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893488 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e4f1353-9937-44bf-99b7-7d9d29e7eb9f-serving-cert\") pod \"console-operator-58897d9998-26t29\" (UID: \"4e4f1353-9937-44bf-99b7-7d9d29e7eb9f\") " pod="openshift-console-operator/console-operator-58897d9998-26t29"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893517 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/457ed397-53fe-415b-8d9c-855986ed1108-metrics-tls\") pod \"dns-default-pnmsb\" (UID: \"457ed397-53fe-415b-8d9c-855986ed1108\") " pod="openshift-dns/dns-default-pnmsb"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893543 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/cfdebc10-56d8-422a-9905-b601c5e52a3e-srv-cert\") pod \"catalog-operator-68c6474976-9m9ck\" (UID: \"cfdebc10-56d8-422a-9905-b601c5e52a3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9m9ck"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893572 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqlqn\" (UniqueName: \"kubernetes.io/projected/36924117-0998-45ef-8887-4a8aa7f5c392-kube-api-access-hqlqn\") pod \"package-server-manager-789f6589d5-r9lmq\" (UID: \"36924117-0998-45ef-8887-4a8aa7f5c392\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-r9lmq"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893614 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f1072b74-e682-4755-ae65-af3f13ab5fe2-trusted-ca\") pod \"ingress-operator-5b745b69d9-jrq68\" (UID: \"f1072b74-e682-4755-ae65-af3f13ab5fe2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893642 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4e4f1353-9937-44bf-99b7-7d9d29e7eb9f-trusted-ca\") pod \"console-operator-58897d9998-26t29\" (UID: \"4e4f1353-9937-44bf-99b7-7d9d29e7eb9f\") " pod="openshift-console-operator/console-operator-58897d9998-26t29"
\"console-operator-58897d9998-26t29\" (UID: \"4e4f1353-9937-44bf-99b7-7d9d29e7eb9f\") " pod="openshift-console-operator/console-operator-58897d9998-26t29" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893674 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vl8w\" (UniqueName: \"kubernetes.io/projected/7cfe2940-b7fe-4556-882d-e413f1f2a270-kube-api-access-9vl8w\") pod \"router-default-5444994796-rr29s\" (UID: \"7cfe2940-b7fe-4556-882d-e413f1f2a270\") " pod="openshift-ingress/router-default-5444994796-rr29s" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893703 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/f5ce7765-3021-4ace-89f4-1b78230d9def-certs\") pod \"machine-config-server-5fwvr\" (UID: \"f5ce7765-3021-4ace-89f4-1b78230d9def\") " pod="openshift-machine-config-operator/machine-config-server-5fwvr" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893733 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/7cfe2940-b7fe-4556-882d-e413f1f2a270-stats-auth\") pod \"router-default-5444994796-rr29s\" (UID: \"7cfe2940-b7fe-4556-882d-e413f1f2a270\") " pod="openshift-ingress/router-default-5444994796-rr29s" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893755 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7cfe2940-b7fe-4556-882d-e413f1f2a270-metrics-certs\") pod \"router-default-5444994796-rr29s\" (UID: \"7cfe2940-b7fe-4556-882d-e413f1f2a270\") " pod="openshift-ingress/router-default-5444994796-rr29s" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893782 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t78gq\" (UniqueName: \"kubernetes.io/projected/ffcd67ab-3e09-4eea-9ba2-d1f4a210ccc6-kube-api-access-t78gq\") pod \"service-ca-operator-777779d784-z8qpj\" (UID: \"ffcd67ab-3e09-4eea-9ba2-d1f4a210ccc6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z8qpj" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893825 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8s4mc\" (UniqueName: \"kubernetes.io/projected/71fa613a-a051-44cb-9532-0bec74a4c46d-kube-api-access-8s4mc\") pod \"multus-admission-controller-857f4d67dd-p8rcl\" (UID: \"71fa613a-a051-44cb-9532-0bec74a4c46d\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p8rcl" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893855 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e4f1353-9937-44bf-99b7-7d9d29e7eb9f-config\") pod \"console-operator-58897d9998-26t29\" (UID: \"4e4f1353-9937-44bf-99b7-7d9d29e7eb9f\") " pod="openshift-console-operator/console-operator-58897d9998-26t29" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893879 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mz7k9\" (UniqueName: \"kubernetes.io/projected/3cc6633d-c47d-4179-89c9-8ffd18bd0707-kube-api-access-mz7k9\") pod \"packageserver-d55dfcdfc-pgjvs\" (UID: \"3cc6633d-c47d-4179-89c9-8ffd18bd0707\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893922 4721 
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893950 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/71fa613a-a051-44cb-9532-0bec74a4c46d-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-p8rcl\" (UID: \"71fa613a-a051-44cb-9532-0bec74a4c46d\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p8rcl"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.893984 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/7cfe2940-b7fe-4556-882d-e413f1f2a270-default-certificate\") pod \"router-default-5444994796-rr29s\" (UID: \"7cfe2940-b7fe-4556-882d-e413f1f2a270\") " pod="openshift-ingress/router-default-5444994796-rr29s"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894014 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/23cff5ba-db17-46f6-bdcb-fb06aa282ca8-auth-proxy-config\") pod \"machine-config-operator-74547568cd-wdvzc\" (UID: \"23cff5ba-db17-46f6-bdcb-fb06aa282ca8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894041 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffcd67ab-3e09-4eea-9ba2-d1f4a210ccc6-config\") pod \"service-ca-operator-777779d784-z8qpj\" (UID: \"ffcd67ab-3e09-4eea-9ba2-d1f4a210ccc6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z8qpj"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894043 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/3cc6633d-c47d-4179-89c9-8ffd18bd0707-tmpfs\") pod \"packageserver-d55dfcdfc-pgjvs\" (UID: \"3cc6633d-c47d-4179-89c9-8ffd18bd0707\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894140 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/23cff5ba-db17-46f6-bdcb-fb06aa282ca8-images\") pod \"machine-config-operator-74547568cd-wdvzc\" (UID: \"23cff5ba-db17-46f6-bdcb-fb06aa282ca8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894190 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bj82b\" (UniqueName: \"kubernetes.io/projected/7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2-kube-api-access-bj82b\") pod \"olm-operator-6b444d44fb-qkfcx\" (UID: \"7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894245 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8a2271db-e669-4c34-94c0-35d7a3ec0468-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-zzjfq\" (UID: \"8a2271db-e669-4c34-94c0-35d7a3ec0468\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzjfq"
\"kube-controller-manager-operator-78b949d7b-zzjfq\" (UID: \"8a2271db-e669-4c34-94c0-35d7a3ec0468\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzjfq" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894303 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/6a61c6a8-ff2f-47fd-aa02-04abddc2d607-registration-dir\") pod \"csi-hostpathplugin-5w6rn\" (UID: \"6a61c6a8-ff2f-47fd-aa02-04abddc2d607\") " pod="hostpath-provisioner/csi-hostpathplugin-5w6rn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894336 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6l8mw\" (UniqueName: \"kubernetes.io/projected/eecc36cc-4fa2-4d1d-99d5-fe4e5fb890c7-kube-api-access-6l8mw\") pod \"dns-operator-744455d44c-jv2w5\" (UID: \"eecc36cc-4fa2-4d1d-99d5-fe4e5fb890c7\") " pod="openshift-dns-operator/dns-operator-744455d44c-jv2w5" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894372 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2-profile-collector-cert\") pod \"olm-operator-6b444d44fb-qkfcx\" (UID: \"7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894404 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6566be5a-1fb9-4941-bd16-8c7a122debff-signing-cabundle\") pod \"service-ca-9c57cc56f-zhzq5\" (UID: \"6566be5a-1fb9-4941-bd16-8c7a122debff\") " pod="openshift-service-ca/service-ca-9c57cc56f-zhzq5" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894435 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8bd725df-b6b1-4de7-a77a-917a4f592b92-config-volume\") pod \"collect-profiles-29395530-fd2zs\" (UID: \"8bd725df-b6b1-4de7-a77a-917a4f592b92\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894494 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/6a61c6a8-ff2f-47fd-aa02-04abddc2d607-plugins-dir\") pod \"csi-hostpathplugin-5w6rn\" (UID: \"6a61c6a8-ff2f-47fd-aa02-04abddc2d607\") " pod="hostpath-provisioner/csi-hostpathplugin-5w6rn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894529 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f1072b74-e682-4755-ae65-af3f13ab5fe2-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jrq68\" (UID: \"f1072b74-e682-4755-ae65-af3f13ab5fe2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894568 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3ed2815-f151-455e-bbe5-aa4f3c23cf7d-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-4s85z\" (UID: \"f3ed2815-f151-455e-bbe5-aa4f3c23cf7d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4s85z" Nov 21 13:38:29 crc kubenswrapper[4721]: 
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894653 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6nv7\" (UniqueName: \"kubernetes.io/projected/db1c61b3-a406-4b4c-970e-fa47d27a966a-kube-api-access-j6nv7\") pod \"cluster-samples-operator-665b6dd947-pgrjc\" (UID: \"db1c61b3-a406-4b4c-970e-fa47d27a966a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pgrjc"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894696 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/6a61c6a8-ff2f-47fd-aa02-04abddc2d607-socket-dir\") pod \"csi-hostpathplugin-5w6rn\" (UID: \"6a61c6a8-ff2f-47fd-aa02-04abddc2d607\") " pod="hostpath-provisioner/csi-hostpathplugin-5w6rn"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894727 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/457ed397-53fe-415b-8d9c-855986ed1108-config-volume\") pod \"dns-default-pnmsb\" (UID: \"457ed397-53fe-415b-8d9c-855986ed1108\") " pod="openshift-dns/dns-default-pnmsb"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894774 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/36924117-0998-45ef-8887-4a8aa7f5c392-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-r9lmq\" (UID: \"36924117-0998-45ef-8887-4a8aa7f5c392\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-r9lmq"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894821 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkbll\" (UniqueName: \"kubernetes.io/projected/1d35c914-e539-4c91-87b0-df193d6ba649-kube-api-access-dkbll\") pod \"machine-config-controller-84d6567774-vk9wg\" (UID: \"1d35c914-e539-4c91-87b0-df193d6ba649\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vk9wg"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894856 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7cfe2940-b7fe-4556-882d-e413f1f2a270-service-ca-bundle\") pod \"router-default-5444994796-rr29s\" (UID: \"7cfe2940-b7fe-4556-882d-e413f1f2a270\") " pod="openshift-ingress/router-default-5444994796-rr29s"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894909 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.894953 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/407861d7-5569-41aa-bcca-0250e213d7c4-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-4n9g5\" (UID: \"407861d7-5569-41aa-bcca-0250e213d7c4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4n9g5"
\"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/407861d7-5569-41aa-bcca-0250e213d7c4-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-4n9g5\" (UID: \"407861d7-5569-41aa-bcca-0250e213d7c4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4n9g5" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.895002 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/eecc36cc-4fa2-4d1d-99d5-fe4e5fb890c7-metrics-tls\") pod \"dns-operator-744455d44c-jv2w5\" (UID: \"eecc36cc-4fa2-4d1d-99d5-fe4e5fb890c7\") " pod="openshift-dns-operator/dns-operator-744455d44c-jv2w5" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.895033 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/06c8c57f-dd7e-4b21-a27d-b2679f84e0b0-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-v8v5q\" (UID: \"06c8c57f-dd7e-4b21-a27d-b2679f84e0b0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v8v5q" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.895092 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f3ed2815-f151-455e-bbe5-aa4f3c23cf7d-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-4s85z\" (UID: \"f3ed2815-f151-455e-bbe5-aa4f3c23cf7d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4s85z" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.895133 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8bd725df-b6b1-4de7-a77a-917a4f592b92-secret-volume\") pod \"collect-profiles-29395530-fd2zs\" (UID: \"8bd725df-b6b1-4de7-a77a-917a4f592b92\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.895168 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/6a61c6a8-ff2f-47fd-aa02-04abddc2d607-csi-data-dir\") pod \"csi-hostpathplugin-5w6rn\" (UID: \"6a61c6a8-ff2f-47fd-aa02-04abddc2d607\") " pod="hostpath-provisioner/csi-hostpathplugin-5w6rn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.895211 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/cfdebc10-56d8-422a-9905-b601c5e52a3e-profile-collector-cert\") pod \"catalog-operator-68c6474976-9m9ck\" (UID: \"cfdebc10-56d8-422a-9905-b601c5e52a3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9m9ck" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.895245 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhgzp\" (UniqueName: \"kubernetes.io/projected/f5ce7765-3021-4ace-89f4-1b78230d9def-kube-api-access-bhgzp\") pod \"machine-config-server-5fwvr\" (UID: \"f5ce7765-3021-4ace-89f4-1b78230d9def\") " pod="openshift-machine-config-operator/machine-config-server-5fwvr" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.895279 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/682dcfaa-ba18-421a-bbd2-4c29d22e2373-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-78vlb\" (UID: \"682dcfaa-ba18-421a-bbd2-4c29d22e2373\") " pod="openshift-marketplace/marketplace-operator-79b997595-78vlb" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.895310 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1d35c914-e539-4c91-87b0-df193d6ba649-proxy-tls\") pod \"machine-config-controller-84d6567774-vk9wg\" (UID: \"1d35c914-e539-4c91-87b0-df193d6ba649\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vk9wg" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.895365 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/6a61c6a8-ff2f-47fd-aa02-04abddc2d607-mountpoint-dir\") pod \"csi-hostpathplugin-5w6rn\" (UID: \"6a61c6a8-ff2f-47fd-aa02-04abddc2d607\") " pod="hostpath-provisioner/csi-hostpathplugin-5w6rn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.895434 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzxjz\" (UniqueName: \"kubernetes.io/projected/682dcfaa-ba18-421a-bbd2-4c29d22e2373-kube-api-access-nzxjz\") pod \"marketplace-operator-79b997595-78vlb\" (UID: \"682dcfaa-ba18-421a-bbd2-4c29d22e2373\") " pod="openshift-marketplace/marketplace-operator-79b997595-78vlb" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.895505 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f3ed2815-f151-455e-bbe5-aa4f3c23cf7d-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-4s85z\" (UID: \"f3ed2815-f151-455e-bbe5-aa4f3c23cf7d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4s85z" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.895546 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2-srv-cert\") pod \"olm-operator-6b444d44fb-qkfcx\" (UID: \"7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.895580 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tn8wz\" (UniqueName: \"kubernetes.io/projected/8bd725df-b6b1-4de7-a77a-917a4f592b92-kube-api-access-tn8wz\") pod \"collect-profiles-29395530-fd2zs\" (UID: \"8bd725df-b6b1-4de7-a77a-917a4f592b92\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.895624 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q466w\" (UniqueName: \"kubernetes.io/projected/457ed397-53fe-415b-8d9c-855986ed1108-kube-api-access-q466w\") pod \"dns-default-pnmsb\" (UID: \"457ed397-53fe-415b-8d9c-855986ed1108\") " pod="openshift-dns/dns-default-pnmsb" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.895664 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ll5x7\" (UniqueName: \"kubernetes.io/projected/cfdebc10-56d8-422a-9905-b601c5e52a3e-kube-api-access-ll5x7\") pod \"catalog-operator-68c6474976-9m9ck\" (UID: \"cfdebc10-56d8-422a-9905-b601c5e52a3e\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9m9ck" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.895701 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a2271db-e669-4c34-94c0-35d7a3ec0468-config\") pod \"kube-controller-manager-operator-78b949d7b-zzjfq\" (UID: \"8a2271db-e669-4c34-94c0-35d7a3ec0468\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzjfq" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.895905 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1d35c914-e539-4c91-87b0-df193d6ba649-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-vk9wg\" (UID: \"1d35c914-e539-4c91-87b0-df193d6ba649\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vk9wg" Nov 21 13:38:29 crc kubenswrapper[4721]: E1121 13:38:29.896510 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:30.396484946 +0000 UTC m=+148.374376204 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.896786 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/6a61c6a8-ff2f-47fd-aa02-04abddc2d607-plugins-dir\") pod \"csi-hostpathplugin-5w6rn\" (UID: \"6a61c6a8-ff2f-47fd-aa02-04abddc2d607\") " pod="hostpath-provisioner/csi-hostpathplugin-5w6rn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.897164 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/6a61c6a8-ff2f-47fd-aa02-04abddc2d607-registration-dir\") pod \"csi-hostpathplugin-5w6rn\" (UID: \"6a61c6a8-ff2f-47fd-aa02-04abddc2d607\") " pod="hostpath-provisioner/csi-hostpathplugin-5w6rn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.897392 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7cfe2940-b7fe-4556-882d-e413f1f2a270-service-ca-bundle\") pod \"router-default-5444994796-rr29s\" (UID: \"7cfe2940-b7fe-4556-882d-e413f1f2a270\") " pod="openshift-ingress/router-default-5444994796-rr29s" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.897507 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a2271db-e669-4c34-94c0-35d7a3ec0468-config\") pod \"kube-controller-manager-operator-78b949d7b-zzjfq\" (UID: \"8a2271db-e669-4c34-94c0-35d7a3ec0468\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzjfq" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.897634 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" 
(UniqueName: \"kubernetes.io/host-path/6a61c6a8-ff2f-47fd-aa02-04abddc2d607-mountpoint-dir\") pod \"csi-hostpathplugin-5w6rn\" (UID: \"6a61c6a8-ff2f-47fd-aa02-04abddc2d607\") " pod="hostpath-provisioner/csi-hostpathplugin-5w6rn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.897646 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/6a61c6a8-ff2f-47fd-aa02-04abddc2d607-csi-data-dir\") pod \"csi-hostpathplugin-5w6rn\" (UID: \"6a61c6a8-ff2f-47fd-aa02-04abddc2d607\") " pod="hostpath-provisioner/csi-hostpathplugin-5w6rn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.897809 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/23cff5ba-db17-46f6-bdcb-fb06aa282ca8-auth-proxy-config\") pod \"machine-config-operator-74547568cd-wdvzc\" (UID: \"23cff5ba-db17-46f6-bdcb-fb06aa282ca8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.897933 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/6a61c6a8-ff2f-47fd-aa02-04abddc2d607-socket-dir\") pod \"csi-hostpathplugin-5w6rn\" (UID: \"6a61c6a8-ff2f-47fd-aa02-04abddc2d607\") " pod="hostpath-provisioner/csi-hostpathplugin-5w6rn" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.899480 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e4f1353-9937-44bf-99b7-7d9d29e7eb9f-config\") pod \"console-operator-58897d9998-26t29\" (UID: \"4e4f1353-9937-44bf-99b7-7d9d29e7eb9f\") " pod="openshift-console-operator/console-operator-58897d9998-26t29" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.900620 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a2271db-e669-4c34-94c0-35d7a3ec0468-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-zzjfq\" (UID: \"8a2271db-e669-4c34-94c0-35d7a3ec0468\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzjfq" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.901228 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3ed2815-f151-455e-bbe5-aa4f3c23cf7d-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-4s85z\" (UID: \"f3ed2815-f151-455e-bbe5-aa4f3c23cf7d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4s85z" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.901623 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4e4f1353-9937-44bf-99b7-7d9d29e7eb9f-trusted-ca\") pod \"console-operator-58897d9998-26t29\" (UID: \"4e4f1353-9937-44bf-99b7-7d9d29e7eb9f\") " pod="openshift-console-operator/console-operator-58897d9998-26t29" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.902698 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7cfe2940-b7fe-4556-882d-e413f1f2a270-metrics-certs\") pod \"router-default-5444994796-rr29s\" (UID: \"7cfe2940-b7fe-4556-882d-e413f1f2a270\") " pod="openshift-ingress/router-default-5444994796-rr29s" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.905306 4721 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/7cfe2940-b7fe-4556-882d-e413f1f2a270-default-certificate\") pod \"router-default-5444994796-rr29s\" (UID: \"7cfe2940-b7fe-4556-882d-e413f1f2a270\") " pod="openshift-ingress/router-default-5444994796-rr29s" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.911748 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.911854 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f1072b74-e682-4755-ae65-af3f13ab5fe2-metrics-tls\") pod \"ingress-operator-5b745b69d9-jrq68\" (UID: \"f1072b74-e682-4755-ae65-af3f13ab5fe2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.911970 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/eecc36cc-4fa2-4d1d-99d5-fe4e5fb890c7-metrics-tls\") pod \"dns-operator-744455d44c-jv2w5\" (UID: \"eecc36cc-4fa2-4d1d-99d5-fe4e5fb890c7\") " pod="openshift-dns-operator/dns-operator-744455d44c-jv2w5" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.912472 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f1072b74-e682-4755-ae65-af3f13ab5fe2-trusted-ca\") pod \"ingress-operator-5b745b69d9-jrq68\" (UID: \"f1072b74-e682-4755-ae65-af3f13ab5fe2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.911993 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/7cfe2940-b7fe-4556-882d-e413f1f2a270-stats-auth\") pod \"router-default-5444994796-rr29s\" (UID: \"7cfe2940-b7fe-4556-882d-e413f1f2a270\") " pod="openshift-ingress/router-default-5444994796-rr29s" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.913695 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f3ed2815-f151-455e-bbe5-aa4f3c23cf7d-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-4s85z\" (UID: \"f3ed2815-f151-455e-bbe5-aa4f3c23cf7d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4s85z" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.918306 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e4f1353-9937-44bf-99b7-7d9d29e7eb9f-serving-cert\") pod \"console-operator-58897d9998-26t29\" (UID: \"4e4f1353-9937-44bf-99b7-7d9d29e7eb9f\") " pod="openshift-console-operator/console-operator-58897d9998-26t29" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.931273 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.944565 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/db1c61b3-a406-4b4c-970e-fa47d27a966a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-pgrjc\" (UID: \"db1c61b3-a406-4b4c-970e-fa47d27a966a\") " 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pgrjc" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.951943 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.971226 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.990741 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.997430 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:29 crc kubenswrapper[4721]: E1121 13:38:29.997796 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:30.49777309 +0000 UTC m=+148.475664348 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:29 crc kubenswrapper[4721]: I1121 13:38:29.997959 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:29 crc kubenswrapper[4721]: E1121 13:38:29.998448 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:30.498434291 +0000 UTC m=+148.476325559 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.011258 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.031772 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.041655 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/06c8c57f-dd7e-4b21-a27d-b2679f84e0b0-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-v8v5q\" (UID: \"06c8c57f-dd7e-4b21-a27d-b2679f84e0b0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v8v5q" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.049752 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.054410 4721 generic.go:334] "Generic (PLEG): container finished" podID="3f75b560-06f9-4c2d-94f0-64ae3d351bfa" containerID="25168e5175900b8b70a50348f54357c06366113ec1ae9364e4ea38f23ffd23c4" exitCode=0 Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.054482 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" event={"ID":"3f75b560-06f9-4c2d-94f0-64ae3d351bfa","Type":"ContainerDied","Data":"25168e5175900b8b70a50348f54357c06366113ec1ae9364e4ea38f23ffd23c4"} Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.054891 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" event={"ID":"3f75b560-06f9-4c2d-94f0-64ae3d351bfa","Type":"ContainerStarted","Data":"2be41b40e197a331027522c1b9f190deb46d4a4aa93633eb5122584ada20cff2"} Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.057291 4721 generic.go:334] "Generic (PLEG): container finished" podID="85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a" containerID="25e868ab85bb68c8d3ac7e33bc44045b7dbd1a2cb9e01ef1e6c6ecd62d4c0d9f" exitCode=0 Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.057383 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" event={"ID":"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a","Type":"ContainerDied","Data":"25e868ab85bb68c8d3ac7e33bc44045b7dbd1a2cb9e01ef1e6c6ecd62d4c0d9f"} Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.057482 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06c8c57f-dd7e-4b21-a27d-b2679f84e0b0-config\") pod \"kube-apiserver-operator-766d6c64bb-v8v5q\" (UID: \"06c8c57f-dd7e-4b21-a27d-b2679f84e0b0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v8v5q" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.057495 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" event={"ID":"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a","Type":"ContainerStarted","Data":"32ade02b97101965261868380c7244fbad2ef16eaf8ac8ed3498d098b3af42f5"} Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.061098 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" event={"ID":"180dc212-b361-48e7-ad3e-70328538c4c6","Type":"ContainerStarted","Data":"948d27f8f981de9fc53c500c5677e51c7f4bc44a17c6c4ac29b3f8e45e86f2bf"} Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.061180 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" event={"ID":"180dc212-b361-48e7-ad3e-70328538c4c6","Type":"ContainerStarted","Data":"731151043c96423f0e1e1f9c61598998cfdb75ca3dce96bb70aaedc223c48364"} Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.061521 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.065126 4721 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-r49kl container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.065404 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" podUID="180dc212-b361-48e7-ad3e-70328538c4c6" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.100647 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.101389 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:30.601329885 +0000 UTC m=+148.579221213 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.102684 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.103784 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:30.603768711 +0000 UTC m=+148.581660079 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.106227 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zbcd\" (UniqueName: \"kubernetes.io/projected/e7f127fb-2ee2-4c82-b5c7-1d59743c9141-kube-api-access-9zbcd\") pod \"machine-approver-56656f9798-262dd\" (UID: \"e7f127fb-2ee2-4c82-b5c7-1d59743c9141\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-262dd" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.117854 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nc4r5\" (UniqueName: \"kubernetes.io/projected/6e21e239-6ef9-41e5-bc03-dbf7a441fda7-kube-api-access-nc4r5\") pod \"authentication-operator-69f744f599-wrvwq\" (UID: \"6e21e239-6ef9-41e5-bc03-dbf7a441fda7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wrvwq" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.132413 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrqsz\" (UniqueName: \"kubernetes.io/projected/786088ab-7a61-4923-9dbd-56ffac4d3230-kube-api-access-rrqsz\") pod \"openshift-apiserver-operator-796bbdcf4f-rr4mf\" (UID: \"786088ab-7a61-4923-9dbd-56ffac4d3230\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rr4mf" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.141371 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-wrvwq" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.148793 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9xpx\" (UniqueName: \"kubernetes.io/projected/305d9e46-16ee-4444-a4af-c58ac4c014f2-kube-api-access-t9xpx\") pod \"controller-manager-879f6c89f-dmhsg\" (UID: \"305d9e46-16ee-4444-a4af-c58ac4c014f2\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.172115 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lf7fz\" (UniqueName: \"kubernetes.io/projected/ac0251e7-5184-49b4-84ae-ab058632718c-kube-api-access-lf7fz\") pod \"console-f9d7485db-gjhwn\" (UID: \"ac0251e7-5184-49b4-84ae-ab058632718c\") " pod="openshift-console/console-f9d7485db-gjhwn" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.175780 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.177392 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-262dd" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.194359 4721 request.go:700] Waited for 1.016020606s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmcc-proxy-tls&limit=500&resourceVersion=0 Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.194600 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.200266 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.204864 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.205177 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:30.705050605 +0000 UTC m=+148.682941883 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.206266 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.206847 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:30.706832431 +0000 UTC m=+148.684723699 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.211124 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1d35c914-e539-4c91-87b0-df193d6ba649-proxy-tls\") pod \"machine-config-controller-84d6567774-vk9wg\" (UID: \"1d35c914-e539-4c91-87b0-df193d6ba649\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vk9wg" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.211476 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.230576 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.239756 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/23cff5ba-db17-46f6-bdcb-fb06aa282ca8-proxy-tls\") pod \"machine-config-operator-74547568cd-wdvzc\" (UID: \"23cff5ba-db17-46f6-bdcb-fb06aa282ca8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.251467 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.272554 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.279837 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/71fa613a-a051-44cb-9532-0bec74a4c46d-webhook-certs\") pod 
\"multus-admission-controller-857f4d67dd-p8rcl\" (UID: \"71fa613a-a051-44cb-9532-0bec74a4c46d\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p8rcl" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.291615 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.310629 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.312537 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:30.812501512 +0000 UTC m=+148.790392770 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.313410 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.314102 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:30.814048521 +0000 UTC m=+148.791939779 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.315415 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.330647 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.342822 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8bd725df-b6b1-4de7-a77a-917a4f592b92-secret-volume\") pod \"collect-profiles-29395530-fd2zs\" (UID: \"8bd725df-b6b1-4de7-a77a-917a4f592b92\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.343772 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/cfdebc10-56d8-422a-9905-b601c5e52a3e-profile-collector-cert\") pod \"catalog-operator-68c6474976-9m9ck\" (UID: \"cfdebc10-56d8-422a-9905-b601c5e52a3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9m9ck" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.344570 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2-profile-collector-cert\") pod \"olm-operator-6b444d44fb-qkfcx\" (UID: \"7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.352984 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.371632 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.381786 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rr4mf" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.382177 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2-srv-cert\") pod \"olm-operator-6b444d44fb-qkfcx\" (UID: \"7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.387477 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-gjhwn" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.390486 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.410807 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.415248 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.415476 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:30.915441738 +0000 UTC m=+148.893333026 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.416137 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.416956 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:30.916918604 +0000 UTC m=+148.894809862 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.422537 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/70faefe3-8edc-4aab-8948-0478073732ae-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-5ffql\" (UID: \"70faefe3-8edc-4aab-8948-0478073732ae\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5ffql" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.430776 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.436613 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-wrvwq"] Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.440738 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dmhsg"] Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.455175 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Nov 21 13:38:30 crc kubenswrapper[4721]: W1121 13:38:30.456170 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6e21e239_6ef9_41e5_bc03_dbf7a441fda7.slice/crio-0b253adee12703da41ffc2ff0db95729582e393f2f63307ca81320a27cfd311e WatchSource:0}: Error finding container 0b253adee12703da41ffc2ff0db95729582e393f2f63307ca81320a27cfd311e: Status 404 returned error can't find the container with id 0b253adee12703da41ffc2ff0db95729582e393f2f63307ca81320a27cfd311e Nov 21 13:38:30 crc kubenswrapper[4721]: W1121 13:38:30.461409 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod305d9e46_16ee_4444_a4af_c58ac4c014f2.slice/crio-5b41361b30386b810a519e14ffcb90581184543121fb80fea80b5a6fb65e3b9b WatchSource:0}: Error finding container 5b41361b30386b810a519e14ffcb90581184543121fb80fea80b5a6fb65e3b9b: Status 404 returned error can't find the container with id 5b41361b30386b810a519e14ffcb90581184543121fb80fea80b5a6fb65e3b9b Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.462319 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/407861d7-5569-41aa-bcca-0250e213d7c4-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-4n9g5\" (UID: \"407861d7-5569-41aa-bcca-0250e213d7c4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4n9g5" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.470473 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 
13:38:30.477195 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70faefe3-8edc-4aab-8948-0478073732ae-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-5ffql\" (UID: \"70faefe3-8edc-4aab-8948-0478073732ae\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5ffql" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.490114 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.510665 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.517335 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.517622 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.017596259 +0000 UTC m=+148.995487517 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.517827 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.518618 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.01860985 +0000 UTC m=+148.996501108 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.532829 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.540340 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/23cff5ba-db17-46f6-bdcb-fb06aa282ca8-images\") pod \"machine-config-operator-74547568cd-wdvzc\" (UID: \"23cff5ba-db17-46f6-bdcb-fb06aa282ca8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.552350 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.558207 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8bd725df-b6b1-4de7-a77a-917a4f592b92-config-volume\") pod \"collect-profiles-29395530-fd2zs\" (UID: \"8bd725df-b6b1-4de7-a77a-917a4f592b92\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.575418 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.589953 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.605382 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/cfdebc10-56d8-422a-9905-b601c5e52a3e-srv-cert\") pod \"catalog-operator-68c6474976-9m9ck\" (UID: \"cfdebc10-56d8-422a-9905-b601c5e52a3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9m9ck" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.609795 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.618868 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.619013 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.118972685 +0000 UTC m=+149.096863943 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.619322 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.620271 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.120260495 +0000 UTC m=+149.098151753 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.622803 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/36924117-0998-45ef-8887-4a8aa7f5c392-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-r9lmq\" (UID: \"36924117-0998-45ef-8887-4a8aa7f5c392\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-r9lmq" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.631924 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.636527 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-gjhwn"] Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.649454 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Nov 21 13:38:30 crc kubenswrapper[4721]: W1121 13:38:30.652797 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac0251e7_5184_49b4_84ae_ab058632718c.slice/crio-310832fce396521e7301f579dd033bff08c7ad52856cc3f62398c28073af73bb WatchSource:0}: Error finding container 310832fce396521e7301f579dd033bff08c7ad52856cc3f62398c28073af73bb: Status 404 returned error can't find the container with id 310832fce396521e7301f579dd033bff08c7ad52856cc3f62398c28073af73bb Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.670005 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Nov 21 13:38:30 crc kubenswrapper[4721]: 
I1121 13:38:30.687315 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rr4mf"] Nov 21 13:38:30 crc kubenswrapper[4721]: W1121 13:38:30.709097 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod786088ab_7a61_4923_9dbd_56ffac4d3230.slice/crio-b142fd7220a1b49671f15e41bc86fdbfe80dc3b96c82fbb1ce83293dcf677733 WatchSource:0}: Error finding container b142fd7220a1b49671f15e41bc86fdbfe80dc3b96c82fbb1ce83293dcf677733: Status 404 returned error can't find the container with id b142fd7220a1b49671f15e41bc86fdbfe80dc3b96c82fbb1ce83293dcf677733 Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.710501 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.721445 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.722188 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.222167369 +0000 UTC m=+149.200058627 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.729234 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.749792 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.770665 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.790303 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.799320 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ffcd67ab-3e09-4eea-9ba2-d1f4a210ccc6-serving-cert\") pod \"service-ca-operator-777779d784-z8qpj\" (UID: \"ffcd67ab-3e09-4eea-9ba2-d1f4a210ccc6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z8qpj" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.810359 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.823634 4721 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.824152 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.324132454 +0000 UTC m=+149.302023712 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.829743 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.838169 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffcd67ab-3e09-4eea-9ba2-d1f4a210ccc6-config\") pod \"service-ca-operator-777779d784-z8qpj\" (UID: \"ffcd67ab-3e09-4eea-9ba2-d1f4a210ccc6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z8qpj" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.855725 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.859823 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/682dcfaa-ba18-421a-bbd2-4c29d22e2373-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-78vlb\" (UID: \"682dcfaa-ba18-421a-bbd2-4c29d22e2373\") " pod="openshift-marketplace/marketplace-operator-79b997595-78vlb" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.869609 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.877292 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/682dcfaa-ba18-421a-bbd2-4c29d22e2373-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-78vlb\" (UID: \"682dcfaa-ba18-421a-bbd2-4c29d22e2373\") " pod="openshift-marketplace/marketplace-operator-79b997595-78vlb" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.891284 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.893579 4721 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/packageserver-service-cert: failed to sync secret cache: timed out waiting for the condition Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.893914 4721 secret.go:188] Couldn't get secret 
openshift-operator-lifecycle-manager/packageserver-service-cert: failed to sync secret cache: timed out waiting for the condition Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.893953 4721 secret.go:188] Couldn't get secret openshift-ingress-canary/canary-serving-cert: failed to sync secret cache: timed out waiting for the condition Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.894011 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3cc6633d-c47d-4179-89c9-8ffd18bd0707-webhook-cert podName:3cc6633d-c47d-4179-89c9-8ffd18bd0707 nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.393843088 +0000 UTC m=+149.371734566 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-cert" (UniqueName: "kubernetes.io/secret/3cc6633d-c47d-4179-89c9-8ffd18bd0707-webhook-cert") pod "packageserver-d55dfcdfc-pgjvs" (UID: "3cc6633d-c47d-4179-89c9-8ffd18bd0707") : failed to sync secret cache: timed out waiting for the condition Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.894047 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3cc6633d-c47d-4179-89c9-8ffd18bd0707-apiservice-cert podName:3cc6633d-c47d-4179-89c9-8ffd18bd0707 nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.394034314 +0000 UTC m=+149.371925872 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "apiservice-cert" (UniqueName: "kubernetes.io/secret/3cc6633d-c47d-4179-89c9-8ffd18bd0707-apiservice-cert") pod "packageserver-d55dfcdfc-pgjvs" (UID: "3cc6633d-c47d-4179-89c9-8ffd18bd0707") : failed to sync secret cache: timed out waiting for the condition Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.894149 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cd477a26-ea1b-4159-81c5-e861d8541643-cert podName:cd477a26-ea1b-4159-81c5-e861d8541643 nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.394125337 +0000 UTC m=+149.372016615 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cd477a26-ea1b-4159-81c5-e861d8541643-cert") pod "ingress-canary-76xjj" (UID: "cd477a26-ea1b-4159-81c5-e861d8541643") : failed to sync secret cache: timed out waiting for the condition Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.895984 4721 secret.go:188] Couldn't get secret openshift-machine-config-operator/machine-config-server-tls: failed to sync secret cache: timed out waiting for the condition Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.896051 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f5ce7765-3021-4ace-89f4-1b78230d9def-certs podName:f5ce7765-3021-4ace-89f4-1b78230d9def nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.396033887 +0000 UTC m=+149.373925145 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "certs" (UniqueName: "kubernetes.io/secret/f5ce7765-3021-4ace-89f4-1b78230d9def-certs") pod "machine-config-server-5fwvr" (UID: "f5ce7765-3021-4ace-89f4-1b78230d9def") : failed to sync secret cache: timed out waiting for the condition Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.896054 4721 configmap.go:193] Couldn't get configMap openshift-service-ca/signing-cabundle: failed to sync configmap cache: timed out waiting for the condition Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.896117 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/6566be5a-1fb9-4941-bd16-8c7a122debff-signing-cabundle podName:6566be5a-1fb9-4941-bd16-8c7a122debff nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.396106669 +0000 UTC m=+149.373997927 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "signing-cabundle" (UniqueName: "kubernetes.io/configmap/6566be5a-1fb9-4941-bd16-8c7a122debff-signing-cabundle") pod "service-ca-9c57cc56f-zhzq5" (UID: "6566be5a-1fb9-4941-bd16-8c7a122debff") : failed to sync configmap cache: timed out waiting for the condition Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.896678 4721 secret.go:188] Couldn't get secret openshift-service-ca/signing-key: failed to sync secret cache: timed out waiting for the condition Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.896766 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6566be5a-1fb9-4941-bd16-8c7a122debff-signing-key podName:6566be5a-1fb9-4941-bd16-8c7a122debff nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.396748529 +0000 UTC m=+149.374639807 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "signing-key" (UniqueName: "kubernetes.io/secret/6566be5a-1fb9-4941-bd16-8c7a122debff-signing-key") pod "service-ca-9c57cc56f-zhzq5" (UID: "6566be5a-1fb9-4941-bd16-8c7a122debff") : failed to sync secret cache: timed out waiting for the condition Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.896705 4721 secret.go:188] Couldn't get secret openshift-machine-config-operator/node-bootstrapper-token: failed to sync secret cache: timed out waiting for the condition Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.896824 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f5ce7765-3021-4ace-89f4-1b78230d9def-node-bootstrap-token podName:f5ce7765-3021-4ace-89f4-1b78230d9def nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.396814531 +0000 UTC m=+149.374706049 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "node-bootstrap-token" (UniqueName: "kubernetes.io/secret/f5ce7765-3021-4ace-89f4-1b78230d9def-node-bootstrap-token") pod "machine-config-server-5fwvr" (UID: "f5ce7765-3021-4ace-89f4-1b78230d9def") : failed to sync secret cache: timed out waiting for the condition Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.897804 4721 secret.go:188] Couldn't get secret openshift-dns/dns-default-metrics-tls: failed to sync secret cache: timed out waiting for the condition Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.897918 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/457ed397-53fe-415b-8d9c-855986ed1108-metrics-tls podName:457ed397-53fe-415b-8d9c-855986ed1108 nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.397905435 +0000 UTC m=+149.375796713 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-tls" (UniqueName: "kubernetes.io/secret/457ed397-53fe-415b-8d9c-855986ed1108-metrics-tls") pod "dns-default-pnmsb" (UID: "457ed397-53fe-415b-8d9c-855986ed1108") : failed to sync secret cache: timed out waiting for the condition Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.898110 4721 configmap.go:193] Couldn't get configMap openshift-dns/dns-default: failed to sync configmap cache: timed out waiting for the condition Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.898157 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/457ed397-53fe-415b-8d9c-855986ed1108-config-volume podName:457ed397-53fe-415b-8d9c-855986ed1108 nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.398143433 +0000 UTC m=+149.376034921 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/457ed397-53fe-415b-8d9c-855986ed1108-config-volume") pod "dns-default-pnmsb" (UID: "457ed397-53fe-415b-8d9c-855986ed1108") : failed to sync configmap cache: timed out waiting for the condition Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.909615 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.925225 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.925376 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.425349125 +0000 UTC m=+149.403240383 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.925788 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:30 crc kubenswrapper[4721]: E1121 13:38:30.926191 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.426178211 +0000 UTC m=+149.404069479 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.929314 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.949683 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.969845 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Nov 21 13:38:30 crc kubenswrapper[4721]: I1121 13:38:30.988977 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.009031 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.026531 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:31 crc kubenswrapper[4721]: E1121 13:38:31.026781 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.526755603 +0000 UTC m=+149.504646891 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.028197 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:31 crc kubenswrapper[4721]: E1121 13:38:31.028638 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.528620551 +0000 UTC m=+149.506511809 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.029417 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.050629 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.068335 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-262dd" event={"ID":"e7f127fb-2ee2-4c82-b5c7-1d59743c9141","Type":"ContainerStarted","Data":"009e813d85c5417145031c7af2c8260b4b2ad37810abd26fd3480def881e8a23"} Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.068442 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-262dd" event={"ID":"e7f127fb-2ee2-4c82-b5c7-1d59743c9141","Type":"ContainerStarted","Data":"e05c30cbf36bc6012f1e3549727d651c739b5afbe57ba6c38a4725f9b8264928"} Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.068465 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-262dd" event={"ID":"e7f127fb-2ee2-4c82-b5c7-1d59743c9141","Type":"ContainerStarted","Data":"86ab168477ebd712a98f004102396ac0120ee10cd840637a2ef3143020e0c9bb"} Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.070377 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.075709 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" event={"ID":"85e5fa8a-890a-4ba8-8f3c-6f7e4d16d62a","Type":"ContainerStarted","Data":"3f41e72f74354b6e779ce115821e2cce36ddf92f0b3e7f61d313002c62f11b3c"} Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.077772 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-gjhwn" event={"ID":"ac0251e7-5184-49b4-84ae-ab058632718c","Type":"ContainerStarted","Data":"abae85576c50b6556f32205581e903c7e44bba1aa85775e0197ef62360e16a3f"} Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.077809 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-gjhwn" event={"ID":"ac0251e7-5184-49b4-84ae-ab058632718c","Type":"ContainerStarted","Data":"310832fce396521e7301f579dd033bff08c7ad52856cc3f62398c28073af73bb"} Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.079653 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rr4mf" event={"ID":"786088ab-7a61-4923-9dbd-56ffac4d3230","Type":"ContainerStarted","Data":"29a6f6bda7e76ab968cbd55407d9be9b8cf2508db80330a0bbad7b726f6e0f23"} Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.079693 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rr4mf" 
event={"ID":"786088ab-7a61-4923-9dbd-56ffac4d3230","Type":"ContainerStarted","Data":"b142fd7220a1b49671f15e41bc86fdbfe80dc3b96c82fbb1ce83293dcf677733"} Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.081478 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-wrvwq" event={"ID":"6e21e239-6ef9-41e5-bc03-dbf7a441fda7","Type":"ContainerStarted","Data":"0e4d7079f9ca2203af08b238b0bff27fce165ffdd6bbc187bdce905b63449053"} Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.081515 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-wrvwq" event={"ID":"6e21e239-6ef9-41e5-bc03-dbf7a441fda7","Type":"ContainerStarted","Data":"0b253adee12703da41ffc2ff0db95729582e393f2f63307ca81320a27cfd311e"} Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.084017 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" event={"ID":"3f75b560-06f9-4c2d-94f0-64ae3d351bfa","Type":"ContainerStarted","Data":"b0c5f801c00620ea5546c7b410602d1119c082b6c6cb7714d3e0fbc7101e2bca"} Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.084063 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" event={"ID":"3f75b560-06f9-4c2d-94f0-64ae3d351bfa","Type":"ContainerStarted","Data":"76c5d4697ed6a5511c5710053eb86635a9ca077a4dabf3d9706d4fec305727e1"} Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.086681 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" event={"ID":"305d9e46-16ee-4444-a4af-c58ac4c014f2","Type":"ContainerStarted","Data":"7503b9a62ab55bec42ae12a811b8b98a3f4bd794d3d86b3cd02ece5053c7d97c"} Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.086750 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" event={"ID":"305d9e46-16ee-4444-a4af-c58ac4c014f2","Type":"ContainerStarted","Data":"5b41361b30386b810a519e14ffcb90581184543121fb80fea80b5a6fb65e3b9b"} Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.087216 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.091254 4721 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-dmhsg container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.091325 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" podUID="305d9e46-16ee-4444-a4af-c58ac4c014f2" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.091458 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.093744 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" Nov 21 13:38:31 
crc kubenswrapper[4721]: I1121 13:38:31.110181 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.129593 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:31 crc kubenswrapper[4721]: E1121 13:38:31.129830 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.629788741 +0000 UTC m=+149.607680019 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.130918 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:31 crc kubenswrapper[4721]: E1121 13:38:31.131738 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.631717922 +0000 UTC m=+149.609609400 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.133824 4721 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.151409 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.175157 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.191299 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.208810 4721 request.go:700] Waited for 1.927431025s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns/secrets?fieldSelector=metadata.name%3Ddns-default-metrics-tls&limit=500&resourceVersion=0 Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.217375 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.230448 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.233917 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:31 crc kubenswrapper[4721]: E1121 13:38:31.234053 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.734035338 +0000 UTC m=+149.711926596 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.234226 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:31 crc kubenswrapper[4721]: E1121 13:38:31.234554 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.734547064 +0000 UTC m=+149.712438322 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.250643 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.270764 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.309887 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snrgs\" (UniqueName: \"kubernetes.io/projected/5fc0132e-382a-4fa5-b4a4-602d7cfa461f-kube-api-access-snrgs\") pod \"etcd-operator-b45778765-j4d5j\" (UID: \"5fc0132e-382a-4fa5-b4a4-602d7cfa461f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.327601 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52krc\" (UniqueName: \"kubernetes.io/projected/b6ae0efb-0b79-4944-9dbb-44da8a370450-kube-api-access-52krc\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.335518 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:31 crc kubenswrapper[4721]: E1121 13:38:31.335911 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.835842098 +0000 UTC m=+149.813733386 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.336493 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:31 crc kubenswrapper[4721]: E1121 13:38:31.336860 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:31.836840719 +0000 UTC m=+149.814731977 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.345562 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rs7nj\" (UniqueName: \"kubernetes.io/projected/5c3ec0e2-ee07-440c-a443-128e4c9b82e7-kube-api-access-rs7nj\") pod \"openshift-controller-manager-operator-756b6f6bc6-6lrzd\" (UID: \"5c3ec0e2-ee07-440c-a443-128e4c9b82e7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6lrzd" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.363089 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/97b0bd4b-9511-4ee5-a6ba-d110efff4010-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-2p2bg\" (UID: \"97b0bd4b-9511-4ee5-a6ba-d110efff4010\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.381434 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.391055 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttrp9\" (UniqueName: \"kubernetes.io/projected/022a1886-afb6-49ca-801b-2a57240223bf-kube-api-access-ttrp9\") pod \"oauth-openshift-558db77b4-fnzlv\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.409547 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b6ae0efb-0b79-4944-9dbb-44da8a370450-bound-sa-token\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.428096 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cg67d\" (UniqueName: \"kubernetes.io/projected/8b0cfc08-43fe-4026-b4d1-b3aae8c26fde-kube-api-access-cg67d\") pod \"downloads-7954f5f757-vvz5j\" (UID: \"8b0cfc08-43fe-4026-b4d1-b3aae8c26fde\") " pod="openshift-console/downloads-7954f5f757-vvz5j" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.441865 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.442153 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/457ed397-53fe-415b-8d9c-855986ed1108-config-volume\") pod \"dns-default-pnmsb\" (UID: \"457ed397-53fe-415b-8d9c-855986ed1108\") " pod="openshift-dns/dns-default-pnmsb" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.442292 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3cc6633d-c47d-4179-89c9-8ffd18bd0707-webhook-cert\") pod \"packageserver-d55dfcdfc-pgjvs\" (UID: \"3cc6633d-c47d-4179-89c9-8ffd18bd0707\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.443288 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cd477a26-ea1b-4159-81c5-e861d8541643-cert\") pod \"ingress-canary-76xjj\" (UID: \"cd477a26-ea1b-4159-81c5-e861d8541643\") " pod="openshift-ingress-canary/ingress-canary-76xjj" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.443352 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3cc6633d-c47d-4179-89c9-8ffd18bd0707-apiservice-cert\") pod \"packageserver-d55dfcdfc-pgjvs\" (UID: \"3cc6633d-c47d-4179-89c9-8ffd18bd0707\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs" Nov 21 13:38:31 crc kubenswrapper[4721]: E1121 13:38:31.443380 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-11-21 13:38:31.943349937 +0000 UTC m=+149.921241305 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.443452 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/f5ce7765-3021-4ace-89f4-1b78230d9def-node-bootstrap-token\") pod \"machine-config-server-5fwvr\" (UID: \"f5ce7765-3021-4ace-89f4-1b78230d9def\") " pod="openshift-machine-config-operator/machine-config-server-5fwvr" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.443485 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6566be5a-1fb9-4941-bd16-8c7a122debff-signing-key\") pod \"service-ca-9c57cc56f-zhzq5\" (UID: \"6566be5a-1fb9-4941-bd16-8c7a122debff\") " pod="openshift-service-ca/service-ca-9c57cc56f-zhzq5" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.443530 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/457ed397-53fe-415b-8d9c-855986ed1108-metrics-tls\") pod \"dns-default-pnmsb\" (UID: \"457ed397-53fe-415b-8d9c-855986ed1108\") " pod="openshift-dns/dns-default-pnmsb" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.443565 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/f5ce7765-3021-4ace-89f4-1b78230d9def-certs\") pod \"machine-config-server-5fwvr\" (UID: \"f5ce7765-3021-4ace-89f4-1b78230d9def\") " pod="openshift-machine-config-operator/machine-config-server-5fwvr" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.443709 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6566be5a-1fb9-4941-bd16-8c7a122debff-signing-cabundle\") pod \"service-ca-9c57cc56f-zhzq5\" (UID: \"6566be5a-1fb9-4941-bd16-8c7a122debff\") " pod="openshift-service-ca/service-ca-9c57cc56f-zhzq5" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.444485 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6566be5a-1fb9-4941-bd16-8c7a122debff-signing-cabundle\") pod \"service-ca-9c57cc56f-zhzq5\" (UID: \"6566be5a-1fb9-4941-bd16-8c7a122debff\") " pod="openshift-service-ca/service-ca-9c57cc56f-zhzq5" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.445302 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bj4h\" (UniqueName: \"kubernetes.io/projected/02155afe-72cf-495e-92e5-3375bf701452-kube-api-access-9bj4h\") pod \"openshift-config-operator-7777fb866f-nbq9v\" (UID: \"02155afe-72cf-495e-92e5-3375bf701452\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.447698 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/3cc6633d-c47d-4179-89c9-8ffd18bd0707-apiservice-cert\") pod \"packageserver-d55dfcdfc-pgjvs\" (UID: \"3cc6633d-c47d-4179-89c9-8ffd18bd0707\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.448350 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3cc6633d-c47d-4179-89c9-8ffd18bd0707-webhook-cert\") pod \"packageserver-d55dfcdfc-pgjvs\" (UID: \"3cc6633d-c47d-4179-89c9-8ffd18bd0707\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.448757 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/457ed397-53fe-415b-8d9c-855986ed1108-metrics-tls\") pod \"dns-default-pnmsb\" (UID: \"457ed397-53fe-415b-8d9c-855986ed1108\") " pod="openshift-dns/dns-default-pnmsb" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.449257 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/f5ce7765-3021-4ace-89f4-1b78230d9def-node-bootstrap-token\") pod \"machine-config-server-5fwvr\" (UID: \"f5ce7765-3021-4ace-89f4-1b78230d9def\") " pod="openshift-machine-config-operator/machine-config-server-5fwvr" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.451274 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6566be5a-1fb9-4941-bd16-8c7a122debff-signing-key\") pod \"service-ca-9c57cc56f-zhzq5\" (UID: \"6566be5a-1fb9-4941-bd16-8c7a122debff\") " pod="openshift-service-ca/service-ca-9c57cc56f-zhzq5" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.452308 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/f5ce7765-3021-4ace-89f4-1b78230d9def-certs\") pod \"machine-config-server-5fwvr\" (UID: \"f5ce7765-3021-4ace-89f4-1b78230d9def\") " pod="openshift-machine-config-operator/machine-config-server-5fwvr" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.466894 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9k7lw\" (UniqueName: \"kubernetes.io/projected/97b0bd4b-9511-4ee5-a6ba-d110efff4010-kube-api-access-9k7lw\") pod \"cluster-image-registry-operator-dc59b4c8b-2p2bg\" (UID: \"97b0bd4b-9511-4ee5-a6ba-d110efff4010\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.471504 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cd477a26-ea1b-4159-81c5-e861d8541643-cert\") pod \"ingress-canary-76xjj\" (UID: \"cd477a26-ea1b-4159-81c5-e861d8541643\") " pod="openshift-ingress-canary/ingress-canary-76xjj" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.471714 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/457ed397-53fe-415b-8d9c-855986ed1108-config-volume\") pod \"dns-default-pnmsb\" (UID: \"457ed397-53fe-415b-8d9c-855986ed1108\") " pod="openshift-dns/dns-default-pnmsb" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.498432 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgkmg\" (UniqueName: 
\"kubernetes.io/projected/a6ce2425-0836-4d18-95b7-cdf56c182b25-kube-api-access-rgkmg\") pod \"machine-api-operator-5694c8668f-tcjm6\" (UID: \"a6ce2425-0836-4d18-95b7-cdf56c182b25\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tcjm6" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.513228 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6bts\" (UniqueName: \"kubernetes.io/projected/70faefe3-8edc-4aab-8948-0478073732ae-kube-api-access-h6bts\") pod \"kube-storage-version-migrator-operator-b67b599dd-5ffql\" (UID: \"70faefe3-8edc-4aab-8948-0478073732ae\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5ffql" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.531633 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbxwc\" (UniqueName: \"kubernetes.io/projected/f1072b74-e682-4755-ae65-af3f13ab5fe2-kube-api-access-rbxwc\") pod \"ingress-operator-5b745b69d9-jrq68\" (UID: \"f1072b74-e682-4755-ae65-af3f13ab5fe2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.544995 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:31 crc kubenswrapper[4721]: E1121 13:38:31.545992 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:32.045971022 +0000 UTC m=+150.023862280 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.548961 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ft7d\" (UniqueName: \"kubernetes.io/projected/4e4f1353-9937-44bf-99b7-7d9d29e7eb9f-kube-api-access-2ft7d\") pod \"console-operator-58897d9998-26t29\" (UID: \"4e4f1353-9937-44bf-99b7-7d9d29e7eb9f\") " pod="openshift-console-operator/console-operator-58897d9998-26t29" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.561046 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5ffql" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.568310 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6gdj\" (UniqueName: \"kubernetes.io/projected/407861d7-5569-41aa-bcca-0250e213d7c4-kube-api-access-f6gdj\") pod \"control-plane-machine-set-operator-78cbb6b69f-4n9g5\" (UID: \"407861d7-5569-41aa-bcca-0250e213d7c4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4n9g5" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.575494 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4n9g5" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.590220 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wkg5\" (UniqueName: \"kubernetes.io/projected/c6fe4d9d-7eef-4287-b166-43800e03e8de-kube-api-access-5wkg5\") pod \"migrator-59844c95c7-pbtkt\" (UID: \"c6fe4d9d-7eef-4287-b166-43800e03e8de\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pbtkt" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.599312 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6lrzd" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.615994 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qhs9\" (UniqueName: \"kubernetes.io/projected/23cff5ba-db17-46f6-bdcb-fb06aa282ca8-kube-api-access-9qhs9\") pod \"machine-config-operator-74547568cd-wdvzc\" (UID: \"23cff5ba-db17-46f6-bdcb-fb06aa282ca8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.622750 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-vvz5j" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.637767 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vl8w\" (UniqueName: \"kubernetes.io/projected/7cfe2940-b7fe-4556-882d-e413f1f2a270-kube-api-access-9vl8w\") pod \"router-default-5444994796-rr29s\" (UID: \"7cfe2940-b7fe-4556-882d-e413f1f2a270\") " pod="openshift-ingress/router-default-5444994796-rr29s" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.643516 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.651578 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.652874 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pbtkt" Nov 21 13:38:31 crc kubenswrapper[4721]: E1121 13:38:31.653008 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:32.152981925 +0000 UTC m=+150.130873183 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.653278 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.652886 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8s4mc\" (UniqueName: \"kubernetes.io/projected/71fa613a-a051-44cb-9532-0bec74a4c46d-kube-api-access-8s4mc\") pod \"multus-admission-controller-857f4d67dd-p8rcl\" (UID: \"71fa613a-a051-44cb-9532-0bec74a4c46d\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p8rcl" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.653463 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:31 crc kubenswrapper[4721]: E1121 13:38:31.654039 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:32.154024218 +0000 UTC m=+150.131915476 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.663847 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.673376 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-tcjm6" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.683623 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t78gq\" (UniqueName: \"kubernetes.io/projected/ffcd67ab-3e09-4eea-9ba2-d1f4a210ccc6-kube-api-access-t78gq\") pod \"service-ca-operator-777779d784-z8qpj\" (UID: \"ffcd67ab-3e09-4eea-9ba2-d1f4a210ccc6\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-z8qpj" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.706323 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-j4d5j"] Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.706496 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mz7k9\" (UniqueName: \"kubernetes.io/projected/3cc6633d-c47d-4179-89c9-8ffd18bd0707-kube-api-access-mz7k9\") pod \"packageserver-d55dfcdfc-pgjvs\" (UID: \"3cc6633d-c47d-4179-89c9-8ffd18bd0707\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.710573 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v58mf\" (UniqueName: \"kubernetes.io/projected/6a61c6a8-ff2f-47fd-aa02-04abddc2d607-kube-api-access-v58mf\") pod \"csi-hostpathplugin-5w6rn\" (UID: \"6a61c6a8-ff2f-47fd-aa02-04abddc2d607\") " pod="hostpath-provisioner/csi-hostpathplugin-5w6rn" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.725243 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6npwq\" (UniqueName: \"kubernetes.io/projected/6566be5a-1fb9-4941-bd16-8c7a122debff-kube-api-access-6npwq\") pod \"service-ca-9c57cc56f-zhzq5\" (UID: \"6566be5a-1fb9-4941-bd16-8c7a122debff\") " pod="openshift-service-ca/service-ca-9c57cc56f-zhzq5" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.726947 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-5w6rn" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.749151 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-26t29" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.752191 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bj82b\" (UniqueName: \"kubernetes.io/projected/7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2-kube-api-access-bj82b\") pod \"olm-operator-6b444d44fb-qkfcx\" (UID: \"7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.758651 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:31 crc kubenswrapper[4721]: E1121 13:38:31.758922 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:32.258887464 +0000 UTC m=+150.236778722 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.759089 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:31 crc kubenswrapper[4721]: E1121 13:38:31.759475 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:32.259464182 +0000 UTC m=+150.237355440 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.759521 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-rr29s" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.768883 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8a2271db-e669-4c34-94c0-35d7a3ec0468-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-zzjfq\" (UID: \"8a2271db-e669-4c34-94c0-35d7a3ec0468\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzjfq" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.779678 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzjfq" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.787938 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6l8mw\" (UniqueName: \"kubernetes.io/projected/eecc36cc-4fa2-4d1d-99d5-fe4e5fb890c7-kube-api-access-6l8mw\") pod \"dns-operator-744455d44c-jv2w5\" (UID: \"eecc36cc-4fa2-4d1d-99d5-fe4e5fb890c7\") " pod="openshift-dns-operator/dns-operator-744455d44c-jv2w5" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.808895 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-p8rcl" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.810134 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvs95\" (UniqueName: \"kubernetes.io/projected/cd477a26-ea1b-4159-81c5-e861d8541643-kube-api-access-rvs95\") pod \"ingress-canary-76xjj\" (UID: \"cd477a26-ea1b-4159-81c5-e861d8541643\") " pod="openshift-ingress-canary/ingress-canary-76xjj" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.818131 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.828845 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/06c8c57f-dd7e-4b21-a27d-b2679f84e0b0-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-v8v5q\" (UID: \"06c8c57f-dd7e-4b21-a27d-b2679f84e0b0\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v8v5q" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.846298 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.852944 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6nv7\" (UniqueName: \"kubernetes.io/projected/db1c61b3-a406-4b4c-970e-fa47d27a966a-kube-api-access-j6nv7\") pod \"cluster-samples-operator-665b6dd947-pgrjc\" (UID: \"db1c61b3-a406-4b4c-970e-fa47d27a966a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pgrjc" Nov 21 13:38:31 crc kubenswrapper[4721]: W1121 13:38:31.858520 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fc0132e_382a_4fa5_b4a4_602d7cfa461f.slice/crio-786ac615507b1aeddfb4e97be218367069597813d5d51b5299d97d4ba52eb2ff WatchSource:0}: Error finding container 786ac615507b1aeddfb4e97be218367069597813d5d51b5299d97d4ba52eb2ff: Status 404 returned error can't find the container with id 786ac615507b1aeddfb4e97be218367069597813d5d51b5299d97d4ba52eb2ff Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.860253 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:31 crc kubenswrapper[4721]: E1121 13:38:31.860720 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:32.360688124 +0000 UTC m=+150.338579382 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.879583 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhgzp\" (UniqueName: \"kubernetes.io/projected/f5ce7765-3021-4ace-89f4-1b78230d9def-kube-api-access-bhgzp\") pod \"machine-config-server-5fwvr\" (UID: \"f5ce7765-3021-4ace-89f4-1b78230d9def\") " pod="openshift-machine-config-operator/machine-config-server-5fwvr" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.881706 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6lrzd"] Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.887907 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzxjz\" (UniqueName: \"kubernetes.io/projected/682dcfaa-ba18-421a-bbd2-4c29d22e2373-kube-api-access-nzxjz\") pod \"marketplace-operator-79b997595-78vlb\" (UID: \"682dcfaa-ba18-421a-bbd2-4c29d22e2373\") " pod="openshift-marketplace/marketplace-operator-79b997595-78vlb" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.907222 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f1072b74-e682-4755-ae65-af3f13ab5fe2-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jrq68\" (UID: \"f1072b74-e682-4755-ae65-af3f13ab5fe2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.917524 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-vvz5j"] Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.936287 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f3ed2815-f151-455e-bbe5-aa4f3c23cf7d-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-4s85z\" (UID: \"f3ed2815-f151-455e-bbe5-aa4f3c23cf7d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4s85z" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.952993 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkbll\" (UniqueName: \"kubernetes.io/projected/1d35c914-e539-4c91-87b0-df193d6ba649-kube-api-access-dkbll\") pod \"machine-config-controller-84d6567774-vk9wg\" (UID: \"1d35c914-e539-4c91-87b0-df193d6ba649\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vk9wg" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.959958 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-78vlb" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.962120 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:31 crc kubenswrapper[4721]: E1121 13:38:31.962487 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:32.462471493 +0000 UTC m=+150.440362751 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.964634 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q466w\" (UniqueName: \"kubernetes.io/projected/457ed397-53fe-415b-8d9c-855986ed1108-kube-api-access-q466w\") pod \"dns-default-pnmsb\" (UID: \"457ed397-53fe-415b-8d9c-855986ed1108\") " pod="openshift-dns/dns-default-pnmsb" Nov 21 13:38:31 crc kubenswrapper[4721]: W1121 13:38:31.968131 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8b0cfc08_43fe_4026_b4d1_b3aae8c26fde.slice/crio-cb4e9815d78d26eeaa04f6cb6d61fc7b7c0c9179a698b412f67b6660ebc78fe7 WatchSource:0}: Error finding container cb4e9815d78d26eeaa04f6cb6d61fc7b7c0c9179a698b412f67b6660ebc78fe7: Status 404 returned error can't find the container with id cb4e9815d78d26eeaa04f6cb6d61fc7b7c0c9179a698b412f67b6660ebc78fe7 Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.971670 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-z8qpj" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.983451 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.991764 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-zhzq5" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.996437 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tn8wz\" (UniqueName: \"kubernetes.io/projected/8bd725df-b6b1-4de7-a77a-917a4f592b92-kube-api-access-tn8wz\") pod \"collect-profiles-29395530-fd2zs\" (UID: \"8bd725df-b6b1-4de7-a77a-917a4f592b92\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.997110 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68" Nov 21 13:38:31 crc kubenswrapper[4721]: I1121 13:38:31.997437 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-76xjj" Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.006979 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-pnmsb" Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.008094 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ll5x7\" (UniqueName: \"kubernetes.io/projected/cfdebc10-56d8-422a-9905-b601c5e52a3e-kube-api-access-ll5x7\") pod \"catalog-operator-68c6474976-9m9ck\" (UID: \"cfdebc10-56d8-422a-9905-b601c5e52a3e\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9m9ck" Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.018451 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5ffql"] Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.028688 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-jv2w5" Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.034726 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqlqn\" (UniqueName: \"kubernetes.io/projected/36924117-0998-45ef-8887-4a8aa7f5c392-kube-api-access-hqlqn\") pod \"package-server-manager-789f6589d5-r9lmq\" (UID: \"36924117-0998-45ef-8887-4a8aa7f5c392\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-r9lmq" Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.057752 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-5fwvr" Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.063853 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:32 crc kubenswrapper[4721]: E1121 13:38:32.064786 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:32.564759678 +0000 UTC m=+150.542650936 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.070408 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4n9g5"] Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.092795 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4s85z" Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.099088 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pgrjc" Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.104693 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v8v5q" Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.114818 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-vvz5j" event={"ID":"8b0cfc08-43fe-4026-b4d1-b3aae8c26fde","Type":"ContainerStarted","Data":"cb4e9815d78d26eeaa04f6cb6d61fc7b7c0c9179a698b412f67b6660ebc78fe7"} Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.115783 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j" event={"ID":"5fc0132e-382a-4fa5-b4a4-602d7cfa461f","Type":"ContainerStarted","Data":"786ac615507b1aeddfb4e97be218367069597813d5d51b5299d97d4ba52eb2ff"} Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.116966 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-rr29s" event={"ID":"7cfe2940-b7fe-4556-882d-e413f1f2a270","Type":"ContainerStarted","Data":"481e780f62f4b7fdabfd6ca5dca3a55fcd1f8c76d22bd1291a4f8419f59d44d1"} Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.118898 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6lrzd" event={"ID":"5c3ec0e2-ee07-440c-a443-128e4c9b82e7","Type":"ContainerStarted","Data":"49c1f2e55ea41a215f961173909ecf4864893a355d277bac320918ba347e48c0"} Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.122560 4721 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-dmhsg container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.122616 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" podUID="305d9e46-16ee-4444-a4af-c58ac4c014f2" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.136553 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vk9wg" Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.170417 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:32 crc kubenswrapper[4721]: E1121 13:38:32.173641 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:32.673623109 +0000 UTC m=+150.651514367 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.200653 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs" Nov 21 13:38:32 crc kubenswrapper[4721]: W1121 13:38:32.214766 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70faefe3_8edc_4aab_8948_0478073732ae.slice/crio-b3d55b23c759d982f9c13411d79c534afbc062259c013b3e52219995e4dccb1a WatchSource:0}: Error finding container b3d55b23c759d982f9c13411d79c534afbc062259c013b3e52219995e4dccb1a: Status 404 returned error can't find the container with id b3d55b23c759d982f9c13411d79c534afbc062259c013b3e52219995e4dccb1a Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.230386 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9m9ck" Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.237822 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-r9lmq" Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.242475 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg"] Nov 21 13:38:32 crc kubenswrapper[4721]: W1121 13:38:32.247732 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod407861d7_5569_41aa_bcca_0250e213d7c4.slice/crio-749cc2d60f822565be403763b4d0955184364278c753dfd9a55e14e1460d717c WatchSource:0}: Error finding container 749cc2d60f822565be403763b4d0955184364278c753dfd9a55e14e1460d717c: Status 404 returned error can't find the container with id 749cc2d60f822565be403763b4d0955184364278c753dfd9a55e14e1460d717c Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.271791 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:32 crc kubenswrapper[4721]: E1121 13:38:32.272408 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:32.772389374 +0000 UTC m=+150.750280632 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.323217 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-tcjm6"] Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.357131 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzjfq"] Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.375229 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:32 crc kubenswrapper[4721]: E1121 13:38:32.378324 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:32.878301463 +0000 UTC m=+150.856192711 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.408354 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v"]
Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.499283 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:32 crc kubenswrapper[4721]: E1121 13:38:32.499929 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:32.999912584 +0000 UTC m=+150.977803842 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:32 crc kubenswrapper[4721]: W1121 13:38:32.556499 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a2271db_e669_4c34_94c0_35d7a3ec0468.slice/crio-152471c855192473378a2aada95753c72001bcdc40f27caea55f27f6f698e7a6 WatchSource:0}: Error finding container 152471c855192473378a2aada95753c72001bcdc40f27caea55f27f6f698e7a6: Status 404 returned error can't find the container with id 152471c855192473378a2aada95753c72001bcdc40f27caea55f27f6f698e7a6
Nov 21 13:38:32 crc kubenswrapper[4721]: W1121 13:38:32.575587 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod02155afe_72cf_495e_92e5_3375bf701452.slice/crio-38bc518c2d0f5e3490b33d0ffb9a9321869c36d89d8dde5b0ac5fc4fc21d2837 WatchSource:0}: Error finding container 38bc518c2d0f5e3490b33d0ffb9a9321869c36d89d8dde5b0ac5fc4fc21d2837: Status 404 returned error can't find the container with id 38bc518c2d0f5e3490b33d0ffb9a9321869c36d89d8dde5b0ac5fc4fc21d2837
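Every MountDevice and TearDownAt failure in this excerpt has the same root cause: the lookup of kubevirt.io.hostpath-provisioner in kubelet's table of registered CSI plugins comes up empty, because the csi-hostpathplugin-5w6rn pod that would register the driver is itself still starting (its SyncLoop UPDATE appears a few entries below). A rough sketch of that guard follows, with assumed names (csiRegistry, client); the real lookup lives inside kubelet's CSI volume plugin and plugin manager, not in code shaped like this.

package main

import (
	"fmt"
	"sync"
)

// csiRegistry is a stand-in for kubelet's list of registered CSI drivers.
type csiRegistry struct {
	mu      sync.RWMutex
	drivers map[string]string // driver name -> plugin socket path
}

// register records a driver, as node-driver-registrar does after the
// plugin pod comes up.
func (r *csiRegistry) register(name, endpoint string) {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.drivers[name] = endpoint
}

// client fails the same way newCsiDriverClient does in the log when the
// driver has not registered yet.
func (r *csiRegistry) client(name string) (string, error) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	ep, ok := r.drivers[name]
	if !ok {
		return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
	}
	return ep, nil
}

func main() {
	reg := &csiRegistry{drivers: map[string]string{}}
	if _, err := reg.client("kubevirt.io.hostpath-provisioner"); err != nil {
		fmt.Println("mount fails:", err) // the state seen throughout this log
	}
	// Once the plugin registers (socket path here is illustrative), the
	// same mount operation can proceed on its next retry.
	reg.register("kubevirt.io.hostpath-provisioner", "/var/lib/kubelet/plugins/csi-hostpath/csi.sock")
	ep, _ := reg.client("kubevirt.io.hostpath-provisioner")
	fmt.Println("mount proceeds via", ep)
}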
+0000 UTC m=+150.615506146" Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.639995 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:32 crc kubenswrapper[4721]: E1121 13:38:32.640472 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:33.140456217 +0000 UTC m=+151.118347475 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.706879 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-wrvwq" podStartSLOduration=119.706854238 podStartE2EDuration="1m59.706854238s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:32.70629814 +0000 UTC m=+150.684189428" watchObservedRunningTime="2025-11-21 13:38:32.706854238 +0000 UTC m=+150.684745496" Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.754015 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:32 crc kubenswrapper[4721]: E1121 13:38:32.754367 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:33.254350636 +0000 UTC m=+151.232241894 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.856679 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:32 crc kubenswrapper[4721]: E1121 13:38:32.857154 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:33.357134696 +0000 UTC m=+151.335025954 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.899695 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" podStartSLOduration=119.899678199 podStartE2EDuration="1m59.899678199s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:32.850902471 +0000 UTC m=+150.828793729" watchObservedRunningTime="2025-11-21 13:38:32.899678199 +0000 UTC m=+150.877569447"
Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.924814 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc"]
Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.930868 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-26t29"]
Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.950327 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-pbtkt"]
Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.961397 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:32 crc kubenswrapper[4721]: E1121 13:38:32.961821 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:33.461805766 +0000 UTC m=+151.439697024 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:32 crc kubenswrapper[4721]: I1121 13:38:32.972796 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs"]
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.005981 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-5w6rn"]
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.043076 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx"]
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.051694 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-fnzlv"]
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.057710 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-p8rcl"]
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.067056 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:33 crc kubenswrapper[4721]: E1121 13:38:33.067922 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:33.56790107 +0000 UTC m=+151.545792328 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.068534 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rr4mf" podStartSLOduration=120.068507479 podStartE2EDuration="2m0.068507479s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:33.067323452 +0000 UTC m=+151.045214710" watchObservedRunningTime="2025-11-21 13:38:33.068507479 +0000 UTC m=+151.046398737"
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.145687 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-zhzq5"]
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.165275 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-rr29s" event={"ID":"7cfe2940-b7fe-4556-882d-e413f1f2a270","Type":"ContainerStarted","Data":"fb54ff8c62bcf75a44aff598a164e9e526d66866f202705269adef295e91ffc7"}
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.173664 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:33 crc kubenswrapper[4721]: E1121 13:38:33.173782 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:33.673760367 +0000 UTC m=+151.651651625 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.174151 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:33 crc kubenswrapper[4721]: E1121 13:38:33.175324 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed.
No retries permitted until 2025-11-21 13:38:33.675312026 +0000 UTC m=+151.653203284 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.192716 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-78vlb"]
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.199809 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9" podStartSLOduration=120.199787653 podStartE2EDuration="2m0.199787653s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:33.197925184 +0000 UTC m=+151.175816442" watchObservedRunningTime="2025-11-21 13:38:33.199787653 +0000 UTC m=+151.177678911"
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.201372 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg" event={"ID":"97b0bd4b-9511-4ee5-a6ba-d110efff4010","Type":"ContainerStarted","Data":"d63916cdccef8556339bd9ca217dbafb47e2dee90dd9c0322d9b746ce7960c06"}
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.212326 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-z8qpj"]
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.233235 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-vvz5j" event={"ID":"8b0cfc08-43fe-4026-b4d1-b3aae8c26fde","Type":"ContainerStarted","Data":"08d9afd78ab94c0c2a6219846851aa74b255c04f1cef9a956e1cb020c897a5d1"}
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.234127 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-vvz5j"
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.246152 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-vvz5j container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.246216 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vvz5j" podUID="8b0cfc08-43fe-4026-b4d1-b3aae8c26fde" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.264035 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc" event={"ID":"23cff5ba-db17-46f6-bdcb-fb06aa282ca8","Type":"ContainerStarted","Data":"fec6ba382fd933a9fe7345d2ff0dd659a5c1127cfaa73484bfe2629e1aa82f3a"}
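The prober entries above follow a fixed shape: an HTTP GET against the container's endpoint, the transport error quoted verbatim in the output field, and the container held NotReady. Below is a rough Go equivalent of such an HTTP readiness check, under stated assumptions: a plain GET with a timeout, with any transport error or status of 400 and above counted as failure; probeReadiness is an illustrative name, not kubelet's prober API.

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeReadiness returns nil only for a 2xx/3xx answer within the timeout.
// A refused connection, as for 10.217.0.14:8080 above, surfaces as an
// error, and the container is reported NotReady.
func probeReadiness(url string, timeout time.Duration) error {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		return fmt.Errorf("probe failed: %v", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 400 {
		return fmt.Errorf("probe failed: status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	// The download-server endpoint from the log; refused until the
	// container actually listens on its port.
	if err := probeReadiness("http://10.217.0.14:8080/", time.Second); err != nil {
		fmt.Println(err)
	}
}

The failure is unsurprising at this point in the log: the download-server container was reported ContainerStarted in the same second, so it has not bound its port yet; once it does, a later probe marks the pod Ready.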
pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j" event={"ID":"5fc0132e-382a-4fa5-b4a4-602d7cfa461f","Type":"ContainerStarted","Data":"09f693f218595b65b96c1f1103cd937f2dc993a6a2f22fa86e8239d70f19d6b2"} Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.270707 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-26t29" event={"ID":"4e4f1353-9937-44bf-99b7-7d9d29e7eb9f","Type":"ContainerStarted","Data":"61a45ce9c343548a979368c591e104bc0b1303bdd009ee8ee2aa21cf073ca32d"} Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.273737 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-tcjm6" event={"ID":"a6ce2425-0836-4d18-95b7-cdf56c182b25","Type":"ContainerStarted","Data":"d785295573568362a237230c83beb41612c6c16a79c184c508da015b499991cd"} Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.274898 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:33 crc kubenswrapper[4721]: E1121 13:38:33.275373 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:33.77532931 +0000 UTC m=+151.753220568 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.276690 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6lrzd" event={"ID":"5c3ec0e2-ee07-440c-a443-128e4c9b82e7","Type":"ContainerStarted","Data":"357e703ffea9069226f3b8025e9e162f1c3938041b97e76b71bc88d09d0e0a6d"} Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.277967 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-5fwvr" event={"ID":"f5ce7765-3021-4ace-89f4-1b78230d9def","Type":"ContainerStarted","Data":"2201c27d693f9f25847ddbeaf875eb0030b26a214a0f0ec87aa018b937e9b0d8"} Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.280008 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v" event={"ID":"02155afe-72cf-495e-92e5-3375bf701452","Type":"ContainerStarted","Data":"38bc518c2d0f5e3490b33d0ffb9a9321869c36d89d8dde5b0ac5fc4fc21d2837"} Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.288435 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzjfq" event={"ID":"8a2271db-e669-4c34-94c0-35d7a3ec0468","Type":"ContainerStarted","Data":"152471c855192473378a2aada95753c72001bcdc40f27caea55f27f6f698e7a6"} Nov 21 
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.290493 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4n9g5" event={"ID":"407861d7-5569-41aa-bcca-0250e213d7c4","Type":"ContainerStarted","Data":"749cc2d60f822565be403763b4d0955184364278c753dfd9a55e14e1460d717c"}
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.297917 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5ffql" event={"ID":"70faefe3-8edc-4aab-8948-0478073732ae","Type":"ContainerStarted","Data":"b3d55b23c759d982f9c13411d79c534afbc062259c013b3e52219995e4dccb1a"}
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.299193 4721 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-dmhsg container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body=
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.299233 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" podUID="305d9e46-16ee-4444-a4af-c58ac4c014f2" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused"
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.379103 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:33 crc kubenswrapper[4721]: E1121 13:38:33.380516 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:33.880493975 +0000 UTC m=+151.858385233 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.480724 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:33 crc kubenswrapper[4721]: E1121 13:38:33.481946 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:33.981906723 +0000 UTC m=+151.959798031 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.525309 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-262dd" podStartSLOduration=120.525294022 podStartE2EDuration="2m0.525294022s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:33.523552688 +0000 UTC m=+151.501443946" watchObservedRunningTime="2025-11-21 13:38:33.525294022 +0000 UTC m=+151.503185280"
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.582388 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:33 crc kubenswrapper[4721]: E1121 13:38:33.582880 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:34.082863586 +0000 UTC m=+152.060754844 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:33 crc kubenswrapper[4721]: W1121 13:38:33.652827 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7ae13ea2_b5b4_4f36_bc14_75425e3c3ab2.slice/crio-09e1635662bf27b0fa4f93b1fd9726a9db22fb1e64a6e612a156f01e0cc19b8d WatchSource:0}: Error finding container 09e1635662bf27b0fa4f93b1fd9726a9db22fb1e64a6e612a156f01e0cc19b8d: Status 404 returned error can't find the container with id 09e1635662bf27b0fa4f93b1fd9726a9db22fb1e64a6e612a156f01e0cc19b8d
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.664827 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-gjhwn" podStartSLOduration=120.663700939 podStartE2EDuration="2m0.663700939s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:33.653356075 +0000 UTC m=+151.631247333" watchObservedRunningTime="2025-11-21 13:38:33.663700939 +0000 UTC m=+151.641592207"
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.683138 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:33 crc kubenswrapper[4721]: E1121 13:38:33.683517 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:34.18350152 +0000 UTC m=+152.161392778 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.768520 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-rr29s"
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.774732 4721 patch_prober.go:28] interesting pod/router-default-5444994796-rr29s container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body=
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.774812 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rr29s" podUID="7cfe2940-b7fe-4556-882d-e413f1f2a270" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused"
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.788271 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:33 crc kubenswrapper[4721]: E1121 13:38:33.788856 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:34.28883778 +0000 UTC m=+152.266729038 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.861036 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" podStartSLOduration=120.861004912 podStartE2EDuration="2m0.861004912s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:33.806620128 +0000 UTC m=+151.784511396" watchObservedRunningTime="2025-11-21 13:38:33.861004912 +0000 UTC m=+151.838896170"
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.917545 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:33 crc kubenswrapper[4721]: E1121 13:38:33.918841 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:34.418808333 +0000 UTC m=+152.396699591 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:33 crc kubenswrapper[4721]: I1121 13:38:33.957277 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6lrzd" podStartSLOduration=120.957236927 podStartE2EDuration="2m0.957236927s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:33.956451152 +0000 UTC m=+151.934342420" watchObservedRunningTime="2025-11-21 13:38:33.957236927 +0000 UTC m=+151.935128185"
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.034148 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-j4d5j" podStartSLOduration=121.034113596 podStartE2EDuration="2m1.034113596s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:33.98191617 +0000 UTC m=+151.959807428" watchObservedRunningTime="2025-11-21 13:38:34.034113596 +0000 UTC m=+152.012004874"
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.039713 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
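The pod_startup_latency_tracker entries are bookkeeping around simple time arithmetic: with no image pull observed (both pull timestamps are the zero time), the reported SLO duration is the watch-observed running time minus podCreationTimestamp. The numbers for the openshift-controller-manager-operator entry above, reproduced in Go; the layout strings are chosen here to match the log's timestamp format, and this mirrors the arithmetic only, not kubelet's implementation.

package main

import (
	"fmt"
	"time"
)

func main() {
	// Values taken from the openshift-controller-manager-operator entry above.
	created, err := time.Parse("2006-01-02 15:04:05 -0700 MST", "2025-11-21 13:36:33 +0000 UTC")
	if err != nil {
		panic(err)
	}
	observed, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", "2025-11-21 13:38:33.957236927 +0000 UTC")
	if err != nil {
		panic(err)
	}

	// firstStartedPulling/lastFinishedPulling are the zero time in the log,
	// so no pull interval is subtracted and SLO == end-to-end duration.
	d := observed.Sub(created)
	fmt.Printf("podStartSLOduration=%.9f podStartE2EDuration=%q\n", d.Seconds(), d.String())
}

This prints podStartSLOduration=120.957236927 podStartE2EDuration="2m0.957236927s", matching the entry above; the roughly two-minute values across these pods reflect pods created at 13:36:33 only being observed as running around 13:38:33.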
Nov 21 13:38:34 crc kubenswrapper[4721]: E1121 13:38:34.040038 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:34.540029121 +0000 UTC m=+152.517920379 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.068172 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-vvz5j" podStartSLOduration=121.068143032 podStartE2EDuration="2m1.068143032s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:34.038860575 +0000 UTC m=+152.016751833" watchObservedRunningTime="2025-11-21 13:38:34.068143032 +0000 UTC m=+152.046034290"
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.086863 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-rr29s" podStartSLOduration=121.086837178 podStartE2EDuration="2m1.086837178s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:34.083560855 +0000 UTC m=+152.061452113" watchObservedRunningTime="2025-11-21 13:38:34.086837178 +0000 UTC m=+152.064728436"
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.118678 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68"]
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.126330 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pgrjc"]
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.127595 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-pnmsb"]
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.144358 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:34 crc kubenswrapper[4721]: E1121 13:38:34.144853 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:34.644829385 +0000 UTC m=+152.622720643 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.179235 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-xhzsg"
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.179940 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-xhzsg"
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.231297 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9"
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.231352 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9"
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.248724 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:34 crc kubenswrapper[4721]: E1121 13:38:34.252490 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:34.752472688 +0000 UTC m=+152.730363946 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.281863 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-76xjj"]
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.316848 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9"
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.320915 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-jv2w5"]
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.338448 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5ffql" event={"ID":"70faefe3-8edc-4aab-8948-0478073732ae","Type":"ContainerStarted","Data":"f23c74b5c8f34452ec54a6bdf072823d5a9f49206005c8100265befd0d6b10d4"}
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.349763 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:34 crc kubenswrapper[4721]: E1121 13:38:34.349894 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:34.84986695 +0000 UTC m=+152.827758208 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.350378 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-pnmsb" event={"ID":"457ed397-53fe-415b-8d9c-855986ed1108","Type":"ContainerStarted","Data":"90ac3553123d13a344886b2c67b47ad554fe7b4a915bcf7d27e4af90d9983ad3"}
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.350429 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.350537 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4s85z"]
Nov 21 13:38:34 crc kubenswrapper[4721]: E1121 13:38:34.350819 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:34.850802389 +0000 UTC m=+152.828693647 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.382474 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-zhzq5" event={"ID":"6566be5a-1fb9-4941-bd16-8c7a122debff","Type":"ContainerStarted","Data":"1792958421d388d1242ab6fb0f2a5690a8d1cd62466224c7a5e1642fa51be620"} Nov 21 13:38:34 crc kubenswrapper[4721]: W1121 13:38:34.400457 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcd477a26_ea1b_4159_81c5_e861d8541643.slice/crio-3821c0efd3fc5af2c817431a893f152cb85c66e4153af54c48a56694fd54a6cd WatchSource:0}: Error finding container 3821c0efd3fc5af2c817431a893f152cb85c66e4153af54c48a56694fd54a6cd: Status 404 returned error can't find the container with id 3821c0efd3fc5af2c817431a893f152cb85c66e4153af54c48a56694fd54a6cd Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.400906 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs"] Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.400982 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg" event={"ID":"97b0bd4b-9511-4ee5-a6ba-d110efff4010","Type":"ContainerStarted","Data":"c550db5112a855c69d2349fc5322384cdb7e7be52abd2d9791b581001bfe0fcd"} Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.446406 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-26t29" event={"ID":"4e4f1353-9937-44bf-99b7-7d9d29e7eb9f","Type":"ContainerStarted","Data":"9b3b19194317eb833b4dbf27ccfdafe70c0421206cf221e76e3f655abb46885a"} Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.450621 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9m9ck"] Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.451188 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:34 crc kubenswrapper[4721]: E1121 13:38:34.451507 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:34.951482664 +0000 UTC m=+152.929373922 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.452804 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:34 crc kubenswrapper[4721]: E1121 13:38:34.453261 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:34.953248869 +0000 UTC m=+152.931140127 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.455166 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-z8qpj" event={"ID":"ffcd67ab-3e09-4eea-9ba2-d1f4a210ccc6","Type":"ContainerStarted","Data":"04aa7037e6aaeb6afecd8c4794f4c6d705238eb4b4414f037a5d44d7d1bb944d"} Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.465413 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v8v5q"] Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.470012 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-r9lmq"] Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.480343 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" event={"ID":"022a1886-afb6-49ca-801b-2a57240223bf","Type":"ContainerStarted","Data":"bc00ff42ef23cfe330d3a347c64de0699abda6524f0e98a9aaca661ccf753e2f"} Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.511887 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx" event={"ID":"7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2","Type":"ContainerStarted","Data":"09e1635662bf27b0fa4f93b1fd9726a9db22fb1e64a6e612a156f01e0cc19b8d"} Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.535845 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs" event={"ID":"3cc6633d-c47d-4179-89c9-8ffd18bd0707","Type":"ContainerStarted","Data":"94ac6f255b334b9741f8e4ab6710c55e1981f13a3d05353b8ef0dcc335998e50"} Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.535959 4721 
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.550080 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pbtkt" event={"ID":"c6fe4d9d-7eef-4287-b166-43800e03e8de","Type":"ContainerStarted","Data":"38f44dddb2eca8758c64864132ba1927a111d434490aa8f8831c63105550a1d2"}
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.550606 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pbtkt" event={"ID":"c6fe4d9d-7eef-4287-b166-43800e03e8de","Type":"ContainerStarted","Data":"a4eea07d11ef97c6ca16cac40eaea71ddb78b333d120a2967b5f0c296a77416c"}
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.560940 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:34 crc kubenswrapper[4721]: E1121 13:38:34.561437 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:35.061414359 +0000 UTC m=+153.039305617 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.566377 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4n9g5" event={"ID":"407861d7-5569-41aa-bcca-0250e213d7c4","Type":"ContainerStarted","Data":"bcfea58a76231ceebc9f8065e645c2fbc7a7de1c1826b39287de206d40e26e77"}
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.569366 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-vk9wg"]
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.582452 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2p2bg" podStartSLOduration=121.582432507 podStartE2EDuration="2m1.582432507s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:34.543164767 +0000 UTC m=+152.521056025" watchObservedRunningTime="2025-11-21 13:38:34.582432507 +0000 UTC m=+152.560323765"
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.590566 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-78vlb" event={"ID":"682dcfaa-ba18-421a-bbd2-4c29d22e2373","Type":"ContainerStarted","Data":"fee435f27623936b652b5da3307bdb7eba25dcaa58a5ad65e20426e9087792d3"}
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.591308 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v" event={"ID":"02155afe-72cf-495e-92e5-3375bf701452","Type":"ContainerStarted","Data":"9d1b42083bb4d52391e4146e328cafbd499bc95ccdbe8d7695bd4a95b12d39d5"}
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.597791 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzjfq" event={"ID":"8a2271db-e669-4c34-94c0-35d7a3ec0468","Type":"ContainerStarted","Data":"8a1027e229940f0f2ff33d8000db1488b42964372cf646ed7ac414882bce6c84"}
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.618374 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5w6rn" event={"ID":"6a61c6a8-ff2f-47fd-aa02-04abddc2d607","Type":"ContainerStarted","Data":"90e3144c36d3784feb7eedecf991d07577cf01bd167d342c95c9a88827a834fc"}
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.621742 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4n9g5" podStartSLOduration=121.621702018 podStartE2EDuration="2m1.621702018s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:34.618442005 +0000 UTC m=+152.596333263" watchObservedRunningTime="2025-11-21 13:38:34.621702018 +0000 UTC m=+152.599593276"
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.663683 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:34 crc kubenswrapper[4721]: E1121 13:38:34.669857 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:35.169840906 +0000 UTC m=+153.147732164 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.689376 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-zzjfq" podStartSLOduration=121.689352167 podStartE2EDuration="2m1.689352167s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:34.686786297 +0000 UTC m=+152.664677555" watchObservedRunningTime="2025-11-21 13:38:34.689352167 +0000 UTC m=+152.667243425"
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.718139 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68" event={"ID":"f1072b74-e682-4755-ae65-af3f13ab5fe2","Type":"ContainerStarted","Data":"0ad7286e24eb4e659f798c4fe2033a08931ffcecad74d6cc0f9b5c3ec57d38d4"}
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.740712 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-p8rcl" event={"ID":"71fa613a-a051-44cb-9532-0bec74a4c46d","Type":"ContainerStarted","Data":"fe6fd3456328beba3019e3278cadaa80dea7f43fccd9be946858eea641d527b6"}
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.746281 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-vvz5j container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.746366 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vvz5j" podUID="8b0cfc08-43fe-4026-b4d1-b3aae8c26fde" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.764969 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:34 crc kubenswrapper[4721]: E1121 13:38:34.768234 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:35.268195298 +0000 UTC m=+153.246086556 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.776937 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:34 crc kubenswrapper[4721]: E1121 13:38:34.784090 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:35.284040834 +0000 UTC m=+153.261932092 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.784440 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-nbcn9"
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.882974 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:34 crc kubenswrapper[4721]: E1121 13:38:34.883308 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:35.383264784 +0000 UTC m=+153.361156032 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.883626 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:34 crc kubenswrapper[4721]: E1121 13:38:34.884197 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:35.384185472 +0000 UTC m=+153.362076730 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.948988 4721 patch_prober.go:28] interesting pod/router-default-5444994796-rr29s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 21 13:38:34 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld
Nov 21 13:38:34 crc kubenswrapper[4721]: [+]process-running ok
Nov 21 13:38:34 crc kubenswrapper[4721]: healthz check failed
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.949085 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rr29s" podUID="7cfe2940-b7fe-4556-882d-e413f1f2a270" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 21 13:38:34 crc kubenswrapper[4721]: I1121 13:38:34.984888 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:34 crc kubenswrapper[4721]: E1121 13:38:34.985347 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:35.485322132 +0000 UTC m=+153.463213420 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.088536 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:35 crc kubenswrapper[4721]: E1121 13:38:35.089210 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:35.589194436 +0000 UTC m=+153.567085694 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.189687 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:35 crc kubenswrapper[4721]: E1121 13:38:35.189993 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:35.689978934 +0000 UTC m=+153.667870192 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.291405 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:35 crc kubenswrapper[4721]: E1121 13:38:35.292028 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:35.791998401 +0000 UTC m=+153.769889819 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.358266 4721 patch_prober.go:28] interesting pod/apiserver-76f77b778f-xhzsg container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Nov 21 13:38:35 crc kubenswrapper[4721]: [+]log ok
Nov 21 13:38:35 crc kubenswrapper[4721]: [+]etcd ok
Nov 21 13:38:35 crc kubenswrapper[4721]: [+]poststarthook/start-apiserver-admission-initializer ok
Nov 21 13:38:35 crc kubenswrapper[4721]: [+]poststarthook/generic-apiserver-start-informers ok
Nov 21 13:38:35 crc kubenswrapper[4721]: [+]poststarthook/max-in-flight-filter ok
Nov 21 13:38:35 crc kubenswrapper[4721]: [+]poststarthook/storage-object-count-tracker-hook ok
Nov 21 13:38:35 crc kubenswrapper[4721]: [+]poststarthook/image.openshift.io-apiserver-caches ok
Nov 21 13:38:35 crc kubenswrapper[4721]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld
Nov 21 13:38:35 crc kubenswrapper[4721]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld
Nov 21 13:38:35 crc kubenswrapper[4721]: [+]poststarthook/project.openshift.io-projectcache ok
Nov 21 13:38:35 crc kubenswrapper[4721]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok
Nov 21 13:38:35 crc kubenswrapper[4721]: [+]poststarthook/openshift.io-startinformers ok
Nov 21 13:38:35 crc kubenswrapper[4721]: [+]poststarthook/openshift.io-restmapperupdater ok
Nov 21 13:38:35 crc kubenswrapper[4721]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Nov 21 13:38:35 crc kubenswrapper[4721]: livez check failed
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.358318 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-xhzsg" podUID="3f75b560-06f9-4c2d-94f0-64ae3d351bfa" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.391925 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:35 crc kubenswrapper[4721]: E1121 13:38:35.392271 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:35.892221432 +0000 UTC m=+153.870112700 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.392451 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:35 crc kubenswrapper[4721]: E1121 13:38:35.392759 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:35.892745358 +0000 UTC m=+153.870636616 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.432564 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9"
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.493072 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:35 crc kubenswrapper[4721]: E1121 13:38:35.493239 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:35.993207096 +0000 UTC m=+153.971098364 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.493413 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:35 crc kubenswrapper[4721]: E1121 13:38:35.493721 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:35.993709952 +0000 UTC m=+153.971601210 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.594855 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:35 crc kubenswrapper[4721]: E1121 13:38:35.595047 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:36.095000396 +0000 UTC m=+154.072891654 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.595241 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:35 crc kubenswrapper[4721]: E1121 13:38:35.595665 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:36.095646296 +0000 UTC m=+154.073537554 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.696793 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:35 crc kubenswrapper[4721]: E1121 13:38:35.697147 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:36.197107855 +0000 UTC m=+154.174999113 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.697380 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:35 crc kubenswrapper[4721]: E1121 13:38:35.697862 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:36.197838028 +0000 UTC m=+154.175729446 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.758591 4721 generic.go:334] "Generic (PLEG): container finished" podID="02155afe-72cf-495e-92e5-3375bf701452" containerID="9d1b42083bb4d52391e4146e328cafbd499bc95ccdbe8d7695bd4a95b12d39d5" exitCode=0
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.758692 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v" event={"ID":"02155afe-72cf-495e-92e5-3375bf701452","Type":"ContainerDied","Data":"9d1b42083bb4d52391e4146e328cafbd499bc95ccdbe8d7695bd4a95b12d39d5"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.760423 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v8v5q" event={"ID":"06c8c57f-dd7e-4b21-a27d-b2679f84e0b0","Type":"ContainerStarted","Data":"10d224c122839f03c9722913d3b0aa9e7ccc3a4ada35428c44989051376f104b"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.764437 4721 patch_prober.go:28] interesting pod/router-default-5444994796-rr29s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 21 13:38:35 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld
Nov 21 13:38:35 crc kubenswrapper[4721]: [+]process-running ok
Nov 21 13:38:35 crc kubenswrapper[4721]: healthz check failed
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.764497 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rr29s" podUID="7cfe2940-b7fe-4556-882d-e413f1f2a270" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.771369 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pgrjc" event={"ID":"db1c61b3-a406-4b4c-970e-fa47d27a966a","Type":"ContainerStarted","Data":"ca025a2453f66aceda46255e3d7f0041b31de161f3ba1a8728a3747039edd5b6"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.798389 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:35 crc kubenswrapper[4721]: E1121 13:38:35.799245 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:36.299224455 +0000 UTC m=+154.277115713 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.824390 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" event={"ID":"022a1886-afb6-49ca-801b-2a57240223bf","Type":"ContainerStarted","Data":"44b41c4d5171d98e8966337fae6844f89345319db26dbd5c9fc04b23352de921"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.825311 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv"
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.827616 4721 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-fnzlv container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused" start-of-body=
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.827658 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" podUID="022a1886-afb6-49ca-801b-2a57240223bf" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused"
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.851750 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx" event={"ID":"7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2","Type":"ContainerStarted","Data":"a20a355c9f67610d497c5aead1b338e0f419f59e017a0c5b1f018dbbd4d9b375"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.852710 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx"
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.855305 4721 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-qkfcx container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body=
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.855351 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx" podUID="7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused"
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.861167 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-jv2w5" event={"ID":"eecc36cc-4fa2-4d1d-99d5-fe4e5fb890c7","Type":"ContainerStarted","Data":"87c5f22d43675a08257934af9f7619b87a7ea027ea30e75d175c9e40acefcdd5"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.862441 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc" event={"ID":"23cff5ba-db17-46f6-bdcb-fb06aa282ca8","Type":"ContainerStarted","Data":"e9452fe67a8680110c407f9cfbe60c35b9d71a0fd7bf3a4cf79c47405bde0c50"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.863619 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68" event={"ID":"f1072b74-e682-4755-ae65-af3f13ab5fe2","Type":"ContainerStarted","Data":"1dc50e896bc914d10999453e192bdbd8f0f0b3095bb7469ad4fde97f8770f5ab"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.864956 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-p8rcl" event={"ID":"71fa613a-a051-44cb-9532-0bec74a4c46d","Type":"ContainerStarted","Data":"ad5e45cb412d1882b2a96534243a740441f9e1ce7707c6591952f62c8ca65581"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.865856 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vk9wg" event={"ID":"1d35c914-e539-4c91-87b0-df193d6ba649","Type":"ContainerStarted","Data":"3c9fa8b686df9b6e6364e30bc9a4fbb4f6cb01631c59783017a53da13d1e36f1"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.867102 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-zhzq5" event={"ID":"6566be5a-1fb9-4941-bd16-8c7a122debff","Type":"ContainerStarted","Data":"a22e6689f18030d228cf51874659ec1e312981f92dc4483ffd670bb1e7416c36"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.873029 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-5fwvr" event={"ID":"f5ce7765-3021-4ace-89f4-1b78230d9def","Type":"ContainerStarted","Data":"4fd3bbeff5b87dc70a6c7bd827ed1ccbe2b8386473a901aa10b0003c9ffeef3b"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.874524 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4s85z" event={"ID":"f3ed2815-f151-455e-bbe5-aa4f3c23cf7d","Type":"ContainerStarted","Data":"a4a961c5582f5751638e0177dd18dd98dfd00b29013273e7d60cf31052f1aa0a"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.875711 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-76xjj" event={"ID":"cd477a26-ea1b-4159-81c5-e861d8541643","Type":"ContainerStarted","Data":"85dd66bd9d97c0836e0587f673fd6d60d81df9becd068a3a685fd432afffa9dd"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.875745 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-76xjj" event={"ID":"cd477a26-ea1b-4159-81c5-e861d8541643","Type":"ContainerStarted","Data":"3821c0efd3fc5af2c817431a893f152cb85c66e4153af54c48a56694fd54a6cd"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.876762 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-z8qpj" event={"ID":"ffcd67ab-3e09-4eea-9ba2-d1f4a210ccc6","Type":"ContainerStarted","Data":"edf3b70b8c335a24695206143e3656ccb897701bad1e9e4ea48ac2fe2cb3bd53"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.878317 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs" event={"ID":"3cc6633d-c47d-4179-89c9-8ffd18bd0707","Type":"ContainerStarted","Data":"ee59da8443907529cbf61914bd9d2a3a8bdb997263aa59fea34b74914ae88fbc"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.878871 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs"
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.880207 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs" event={"ID":"8bd725df-b6b1-4de7-a77a-917a4f592b92","Type":"ContainerStarted","Data":"873007ba5515c42c241cd2ed56ef18e88911032a116dd141bcf2c95609b3540f"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.880865 4721 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-pgjvs container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" start-of-body=
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.880914 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs" podUID="3cc6633d-c47d-4179-89c9-8ffd18bd0707" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused"
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.881350 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-tcjm6" event={"ID":"a6ce2425-0836-4d18-95b7-cdf56c182b25","Type":"ContainerStarted","Data":"691921fe5d5dfd3b3d9dbd97c0591dce1a065bbb2f71e0630564650f44e60905"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.882627 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-78vlb" event={"ID":"682dcfaa-ba18-421a-bbd2-4c29d22e2373","Type":"ContainerStarted","Data":"83282bec31a0e925fcb4bdb2a42e8cfcbcce89c5fa175be555b298b81caee165"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.883406 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-78vlb"
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.885219 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-r9lmq" event={"ID":"36924117-0998-45ef-8887-4a8aa7f5c392","Type":"ContainerStarted","Data":"387a942e0c6515effe1fb9cb7330d333d2494bd9ecff8e6b278564685fc95e10"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.885409 4721 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-78vlb container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body=
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.885661 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-78vlb" podUID="682dcfaa-ba18-421a-bbd2-4c29d22e2373" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused"
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.887565 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9m9ck" event={"ID":"cfdebc10-56d8-422a-9905-b601c5e52a3e","Type":"ContainerStarted","Data":"eb95297f883ccd86c26cae85678ac091f459eb763124637d871c534c5d6b4272"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.887600 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9m9ck" event={"ID":"cfdebc10-56d8-422a-9905-b601c5e52a3e","Type":"ContainerStarted","Data":"8d424fd218fcbced3cd5dbdca0038b3a0dad26404fd80da7bab5d30ae29ac0de"}
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.890090 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-26t29"
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.891665 4721 patch_prober.go:28] interesting pod/console-operator-58897d9998-26t29 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/readyz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body=
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.891706 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-26t29" podUID="4e4f1353-9937-44bf-99b7-7d9d29e7eb9f" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.21:8443/readyz\": dial tcp 10.217.0.21:8443: connect: connection refused"
Nov 21 13:38:35 crc kubenswrapper[4721]: I1121 13:38:35.900641 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:35 crc kubenswrapper[4721]: E1121 13:38:35.903010 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:36.402996066 +0000 UTC m=+154.380887324 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.001526 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:36 crc kubenswrapper[4721]: E1121 13:38:36.001839 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:36.501805573 +0000 UTC m=+154.479696841 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.002294 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:36 crc kubenswrapper[4721]: E1121 13:38:36.004245 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:36.504232689 +0000 UTC m=+154.482123947 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.006796 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" podStartSLOduration=123.006782289 podStartE2EDuration="2m3.006782289s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:35.977609884 +0000 UTC m=+153.955501142" watchObservedRunningTime="2025-11-21 13:38:36.006782289 +0000 UTC m=+153.984673547"
Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.007613 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-zhzq5" podStartSLOduration=123.007607294 podStartE2EDuration="2m3.007607294s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:36.005203369 +0000 UTC m=+153.983094617" watchObservedRunningTime="2025-11-21 13:38:36.007607294 +0000 UTC m=+153.985498552"
Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.031915 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-5fwvr" podStartSLOduration=7.031897515 podStartE2EDuration="7.031897515s" podCreationTimestamp="2025-11-21 13:38:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:36.025721322 +0000 UTC m=+154.003612580" watchObservedRunningTime="2025-11-21 13:38:36.031897515 +0000 UTC m=+154.009788773"
Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.081783 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs" podStartSLOduration=123.081768398 podStartE2EDuration="2m3.081768398s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:36.052563393 +0000 UTC m=+154.030454661" watchObservedRunningTime="2025-11-21 13:38:36.081768398 +0000 UTC m=+154.059659656"
Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.082511 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-78vlb" podStartSLOduration=123.082506261 podStartE2EDuration="2m3.082506261s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:36.080313523 +0000 UTC m=+154.058204791" watchObservedRunningTime="2025-11-21 13:38:36.082506261 +0000 UTC m=+154.060397519"
Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.104212 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:36 crc kubenswrapper[4721]: E1121 13:38:36.104408 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:36.604377917 +0000 UTC m=+154.582269175 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.104619 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:36 crc kubenswrapper[4721]: E1121 13:38:36.104994 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:36.604979345 +0000 UTC m=+154.582870693 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.128226 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx" podStartSLOduration=123.128206953 podStartE2EDuration="2m3.128206953s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:36.1265044 +0000 UTC m=+154.104395668" watchObservedRunningTime="2025-11-21 13:38:36.128206953 +0000 UTC m=+154.106098211" Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.144028 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-26t29" podStartSLOduration=123.144000168 podStartE2EDuration="2m3.144000168s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:36.1411934 +0000 UTC m=+154.119084658" watchObservedRunningTime="2025-11-21 13:38:36.144000168 +0000 UTC m=+154.121891426" Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.159646 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-z8qpj" podStartSLOduration=123.159629058 podStartE2EDuration="2m3.159629058s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:36.158702749 +0000 UTC m=+154.136594017" watchObservedRunningTime="2025-11-21 13:38:36.159629058 +0000 UTC m=+154.137520316" Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.205341 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:36 crc kubenswrapper[4721]: E1121 13:38:36.205741 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:36.705723922 +0000 UTC m=+154.683615180 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.307046 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:36 crc kubenswrapper[4721]: E1121 13:38:36.307412 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:36.807396788 +0000 UTC m=+154.785288046 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.408602 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:36 crc kubenswrapper[4721]: E1121 13:38:36.408835 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:36.908804855 +0000 UTC m=+154.886696113 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.408916 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:36 crc kubenswrapper[4721]: E1121 13:38:36.409278 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:36.909268519 +0000 UTC m=+154.887159777 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.510515 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:36 crc kubenswrapper[4721]: E1121 13:38:36.510662 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:37.010645986 +0000 UTC m=+154.988537244 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.510773 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:36 crc kubenswrapper[4721]: E1121 13:38:36.511026 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:37.011018348 +0000 UTC m=+154.988909606 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.611863 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:36 crc kubenswrapper[4721]: E1121 13:38:36.612038 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:37.112012892 +0000 UTC m=+155.089904150 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.612304 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:36 crc kubenswrapper[4721]: E1121 13:38:36.612618 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:37.112609691 +0000 UTC m=+155.090500949 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.713323 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:36 crc kubenswrapper[4721]: E1121 13:38:36.713666 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:37.213622156 +0000 UTC m=+155.191513444 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.714143 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:36 crc kubenswrapper[4721]: E1121 13:38:36.714487 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:37.214471943 +0000 UTC m=+155.192363201 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.764493 4721 patch_prober.go:28] interesting pod/router-default-5444994796-rr29s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 21 13:38:36 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Nov 21 13:38:36 crc kubenswrapper[4721]: [+]process-running ok Nov 21 13:38:36 crc kubenswrapper[4721]: healthz check failed Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.764576 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rr29s" podUID="7cfe2940-b7fe-4556-882d-e413f1f2a270" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.815675 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:36 crc kubenswrapper[4721]: E1121 13:38:36.815876 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:37.315845669 +0000 UTC m=+155.293736927 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.816200 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:36 crc kubenswrapper[4721]: E1121 13:38:36.816513 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:37.31650005 +0000 UTC m=+155.294391308 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.892669 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pbtkt" event={"ID":"c6fe4d9d-7eef-4287-b166-43800e03e8de","Type":"ContainerStarted","Data":"db2045f6c57c28c39b60b1fec5af408ac32c25857077e128aa8f611e84629ec8"} Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.894357 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc" event={"ID":"23cff5ba-db17-46f6-bdcb-fb06aa282ca8","Type":"ContainerStarted","Data":"a68be27ff99b09d94043c0cfa3976ec39b97cc2cb5a99f09606455dae0ce45af"} Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.895749 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v8v5q" event={"ID":"06c8c57f-dd7e-4b21-a27d-b2679f84e0b0","Type":"ContainerStarted","Data":"07b31eebdbad3150884bdc0f9daf4b1f01a9299f1a1dc5ba9e779ec638315fb2"} Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.897094 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pgrjc" event={"ID":"db1c61b3-a406-4b4c-970e-fa47d27a966a","Type":"ContainerStarted","Data":"53ea0b99b4e04edcc4a24607b752efe408b91047bd7559d578709520f7b1ed76"} Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.898927 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-tcjm6" event={"ID":"a6ce2425-0836-4d18-95b7-cdf56c182b25","Type":"ContainerStarted","Data":"eaab4b787e2e049f66afced309629e545fd0a3f46496636f760f22d0359d923f"} Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.900208 4721 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-jv2w5" event={"ID":"eecc36cc-4fa2-4d1d-99d5-fe4e5fb890c7","Type":"ContainerStarted","Data":"64ec62853701dac1a3245db7e480a5a25b9ce310124c6235a3786565dc8cbee0"} Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.901324 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-pnmsb" event={"ID":"457ed397-53fe-415b-8d9c-855986ed1108","Type":"ContainerStarted","Data":"ff9707c79f8d831413a5f73bd6bdb59f459939120b373377daaf5b3c0fe6cb17"} Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.902479 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-r9lmq" event={"ID":"36924117-0998-45ef-8887-4a8aa7f5c392","Type":"ContainerStarted","Data":"d7297d33a23f0cc6a04606c3e0c0a3f79d5f8ed6c068877f74ff9dd145ac2c24"} Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.903515 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs" event={"ID":"8bd725df-b6b1-4de7-a77a-917a4f592b92","Type":"ContainerStarted","Data":"89ad936f9f2d3995e92789f5dbe5486dd791bb9a77d3cbd01c7872ebd7b26fef"} Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.904803 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vk9wg" event={"ID":"1d35c914-e539-4c91-87b0-df193d6ba649","Type":"ContainerStarted","Data":"6e3dd467dc2cacabba28e3ac5f71aae4814c6367c8d6df316e1b6c437c5442df"} Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.906709 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v" event={"ID":"02155afe-72cf-495e-92e5-3375bf701452","Type":"ContainerStarted","Data":"f4f83dada85d448d0daf24a70628674b11fe8088f895eaa1592adcd5ac213b4b"} Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.908304 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4s85z" event={"ID":"f3ed2815-f151-455e-bbe5-aa4f3c23cf7d","Type":"ContainerStarted","Data":"2e880a9f81d4b361346d6490e4eeb90df37ee75e39218293de52579251ee02d1"} Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.908692 4721 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-pgjvs container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" start-of-body= Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.908735 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs" podUID="3cc6633d-c47d-4179-89c9-8ffd18bd0707" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.909151 4721 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-qkfcx container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body= Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.909208 4721 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx" podUID="7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.909255 4721 patch_prober.go:28] interesting pod/console-operator-58897d9998-26t29 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/readyz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.909282 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-26t29" podUID="4e4f1353-9937-44bf-99b7-7d9d29e7eb9f" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.21:8443/readyz\": dial tcp 10.217.0.21:8443: connect: connection refused" Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.909532 4721 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-78vlb container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.909565 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-78vlb" podUID="682dcfaa-ba18-421a-bbd2-4c29d22e2373" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.910289 4721 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-fnzlv container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused" start-of-body= Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.910327 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" podUID="022a1886-afb6-49ca-801b-2a57240223bf" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused" Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.917286 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:36 crc kubenswrapper[4721]: E1121 13:38:36.917757 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:37.417742282 +0000 UTC m=+155.395633540 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:36 crc kubenswrapper[4721]: I1121 13:38:36.932640 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-76xjj" podStartSLOduration=8.932621748 podStartE2EDuration="8.932621748s" podCreationTimestamp="2025-11-21 13:38:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:36.928237211 +0000 UTC m=+154.906128509" watchObservedRunningTime="2025-11-21 13:38:36.932621748 +0000 UTC m=+154.910513006" Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.018996 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:37 crc kubenswrapper[4721]: E1121 13:38:37.020330 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:37.520309666 +0000 UTC m=+155.498200934 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.120479 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:37 crc kubenswrapper[4721]: E1121 13:38:37.120596 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:37.620573228 +0000 UTC m=+155.598464506 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.120707 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:37 crc kubenswrapper[4721]: E1121 13:38:37.121002 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:37.620994741 +0000 UTC m=+155.598885999 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.221892 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:37 crc kubenswrapper[4721]: E1121 13:38:37.222130 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:37.722051967 +0000 UTC m=+155.699943225 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.323131 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:37 crc kubenswrapper[4721]: E1121 13:38:37.323472 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:37.823460715 +0000 UTC m=+155.801351973 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.424196 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:37 crc kubenswrapper[4721]: E1121 13:38:37.424333 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:37.924315115 +0000 UTC m=+155.902206373 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.424444 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:37 crc kubenswrapper[4721]: E1121 13:38:37.424681 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:37.924673776 +0000 UTC m=+155.902565034 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.525738 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:37 crc kubenswrapper[4721]: E1121 13:38:37.525982 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:38.0259504 +0000 UTC m=+156.003841718 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.526025 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:37 crc kubenswrapper[4721]: E1121 13:38:37.526567 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:38.026547109 +0000 UTC m=+156.004438377 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.627789 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:37 crc kubenswrapper[4721]: E1121 13:38:37.627933 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:38.127912665 +0000 UTC m=+156.105803923 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.628132 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:37 crc kubenswrapper[4721]: E1121 13:38:37.628434 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:38.128425081 +0000 UTC m=+156.106316349 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.729011 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:37 crc kubenswrapper[4721]: E1121 13:38:37.729193 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:38.229167008 +0000 UTC m=+156.207058266 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.729423 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:37 crc kubenswrapper[4721]: E1121 13:38:37.729729 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:38.229717695 +0000 UTC m=+156.207608953 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.764016 4721 patch_prober.go:28] interesting pod/router-default-5444994796-rr29s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 21 13:38:37 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Nov 21 13:38:37 crc kubenswrapper[4721]: [+]process-running ok Nov 21 13:38:37 crc kubenswrapper[4721]: healthz check failed Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.764133 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rr29s" podUID="7cfe2940-b7fe-4556-882d-e413f1f2a270" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.831436 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:37 crc kubenswrapper[4721]: E1121 13:38:37.831609 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:38.331587477 +0000 UTC m=+156.309478745 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.831894 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:37 crc kubenswrapper[4721]: E1121 13:38:37.832251 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:38.332233047 +0000 UTC m=+156.310124305 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.914004 4721 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-fnzlv container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused" start-of-body=
Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.914046 4721 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-pgjvs container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" start-of-body=
Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.914128 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs" podUID="3cc6633d-c47d-4179-89c9-8ffd18bd0707" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused"
Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.914077 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" podUID="022a1886-afb6-49ca-801b-2a57240223bf" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.10:6443/healthz\": dial tcp 10.217.0.10:6443: connect: connection refused"
Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.914301 4721 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-qkfcx container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body=
Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.914326 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx" podUID="7ae13ea2-b5b4-4f36-bc14-75425e3c3ab2" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused"
Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.914639 4721 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-78vlb container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body=
Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.914721 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-78vlb" podUID="682dcfaa-ba18-421a-bbd2-4c29d22e2373" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused"
Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.930268 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-v8v5q" podStartSLOduration=124.930248638 podStartE2EDuration="2m4.930248638s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:37.928128492 +0000 UTC m=+155.906019770" watchObservedRunningTime="2025-11-21 13:38:37.930248638 +0000 UTC m=+155.908139896"
Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.930929 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9m9ck" podStartSLOduration=124.93092285 podStartE2EDuration="2m4.93092285s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:36.963451824 +0000 UTC m=+154.941343102" watchObservedRunningTime="2025-11-21 13:38:37.93092285 +0000 UTC m=+155.908814118"
Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.932769 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:37 crc kubenswrapper[4721]: E1121 13:38:37.932965 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:38.432939143 +0000 UTC m=+156.410830401 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:37 crc kubenswrapper[4721]: I1121 13:38:37.948247 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-4s85z" podStartSLOduration=124.948225692 podStartE2EDuration="2m4.948225692s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:37.94720305 +0000 UTC m=+155.925094318" watchObservedRunningTime="2025-11-21 13:38:37.948225692 +0000 UTC m=+155.926116950"
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.034491 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:38 crc kubenswrapper[4721]: E1121 13:38:38.034956 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:38.534937779 +0000 UTC m=+156.512829027 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.139337 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:38 crc kubenswrapper[4721]: E1121 13:38:38.139519 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:38.639481275 +0000 UTC m=+156.617372543 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.139677 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:38 crc kubenswrapper[4721]: E1121 13:38:38.139984 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:38.63997166 +0000 UTC m=+156.617862928 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.240335 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:38 crc kubenswrapper[4721]: E1121 13:38:38.240454 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:38.740432268 +0000 UTC m=+156.718323536 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.240578 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:38 crc kubenswrapper[4721]: E1121 13:38:38.240906 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:38.740895352 +0000 UTC m=+156.718786630 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.342008 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:38 crc kubenswrapper[4721]: E1121 13:38:38.342349 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:38.842324331 +0000 UTC m=+156.820215619 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.342537 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:38 crc kubenswrapper[4721]: E1121 13:38:38.342950 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:38.84293454 +0000 UTC m=+156.820825818 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.443873 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:38 crc kubenswrapper[4721]: E1121 13:38:38.444124 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:38.944091949 +0000 UTC m=+156.921983227 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.546556 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:38 crc kubenswrapper[4721]: E1121 13:38:38.547138 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:39.047111268 +0000 UTC m=+157.025002566 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.647403 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:38 crc kubenswrapper[4721]: E1121 13:38:38.647566 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:39.147539564 +0000 UTC m=+157.125430822 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.647714 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:38 crc kubenswrapper[4721]: E1121 13:38:38.648033 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:39.148019859 +0000 UTC m=+157.125911117 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.748684 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:38 crc kubenswrapper[4721]: E1121 13:38:38.748916 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:39.24887975 +0000 UTC m=+157.226771048 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.749047 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:38 crc kubenswrapper[4721]: E1121 13:38:38.749549 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:39.24952248 +0000 UTC m=+157.227413758 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.763927 4721 patch_prober.go:28] interesting pod/router-default-5444994796-rr29s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 21 13:38:38 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld
Nov 21 13:38:38 crc kubenswrapper[4721]: [+]process-running ok
Nov 21 13:38:38 crc kubenswrapper[4721]: healthz check failed
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.764054 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rr29s" podUID="7cfe2940-b7fe-4556-882d-e413f1f2a270" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.850164 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:38 crc kubenswrapper[4721]: E1121 13:38:38.850426 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:39.350390311 +0000 UTC m=+157.328281609 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.850648 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:38 crc kubenswrapper[4721]: E1121 13:38:38.851144 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:39.351120874 +0000 UTC m=+157.329012162 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.927419 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-p8rcl" event={"ID":"71fa613a-a051-44cb-9532-0bec74a4c46d","Type":"ContainerStarted","Data":"a94608fa9fefc8086ad660f9d410e283df857b90a7751c088523e750a6868666"}
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.949824 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-wdvzc" podStartSLOduration=125.949797715 podStartE2EDuration="2m5.949797715s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:38.944981505 +0000 UTC m=+156.922872773" watchObservedRunningTime="2025-11-21 13:38:38.949797715 +0000 UTC m=+156.927689013"
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.951837 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:38 crc kubenswrapper[4721]: E1121 13:38:38.952016 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:39.451983304 +0000 UTC m=+157.429874562 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.952108 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:38 crc kubenswrapper[4721]: E1121 13:38:38.952610 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:39.452591843 +0000 UTC m=+157.430483101 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:38 crc kubenswrapper[4721]: I1121 13:38:38.968371 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-tcjm6" podStartSLOduration=125.968317136 podStartE2EDuration="2m5.968317136s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:38.964185776 +0000 UTC m=+156.942077084" watchObservedRunningTime="2025-11-21 13:38:38.968317136 +0000 UTC m=+156.946208414"
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.053251 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:39 crc kubenswrapper[4721]: E1121 13:38:39.054316 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:39.55429716 +0000 UTC m=+157.532188428 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.155447 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:39 crc kubenswrapper[4721]: E1121 13:38:39.155955 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:39.655933995 +0000 UTC m=+157.633825243 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.184005 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-xhzsg"
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.191981 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-xhzsg"
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.297660 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:39 crc kubenswrapper[4721]: E1121 13:38:39.297762 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:39.797738788 +0000 UTC m=+157.775630056 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.298046 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:39 crc kubenswrapper[4721]: E1121 13:38:39.300077 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:39.80004493 +0000 UTC m=+157.777936198 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.403811 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:39 crc kubenswrapper[4721]: E1121 13:38:39.403983 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:39.903948856 +0000 UTC m=+157.881840114 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.404230 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:39 crc kubenswrapper[4721]: E1121 13:38:39.404518 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:39.904509784 +0000 UTC m=+157.882401042 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.505306 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:39 crc kubenswrapper[4721]: E1121 13:38:39.505454 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:40.005432616 +0000 UTC m=+157.983323874 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.505504 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:39 crc kubenswrapper[4721]: E1121 13:38:39.506226 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:40.00619834 +0000 UTC m=+157.984089598 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.606657 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:39 crc kubenswrapper[4721]: E1121 13:38:39.606851 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:40.106822853 +0000 UTC m=+158.084714111 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.607176 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:39 crc kubenswrapper[4721]: E1121 13:38:39.607482 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:40.107469973 +0000 UTC m=+158.085361231 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.707774 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:39 crc kubenswrapper[4721]: E1121 13:38:39.708055 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:40.208012084 +0000 UTC m=+158.185903342 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.708388 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:39 crc kubenswrapper[4721]: E1121 13:38:39.708802 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:40.208791928 +0000 UTC m=+158.186683356 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.766140 4721 patch_prober.go:28] interesting pod/router-default-5444994796-rr29s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 21 13:38:39 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld
Nov 21 13:38:39 crc kubenswrapper[4721]: [+]process-running ok
Nov 21 13:38:39 crc kubenswrapper[4721]: healthz check failed
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.766228 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rr29s" podUID="7cfe2940-b7fe-4556-882d-e413f1f2a270" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.810287 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:39 crc kubenswrapper[4721]: E1121 13:38:39.810521 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:40.310483945 +0000 UTC m=+158.288375203 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.810664 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:39 crc kubenswrapper[4721]: E1121 13:38:39.810983 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:40.31096786 +0000 UTC m=+158.288859118 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.911738 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:39 crc kubenswrapper[4721]: E1121 13:38:39.912012 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:40.411968055 +0000 UTC m=+158.389859323 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.912530 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:39 crc kubenswrapper[4721]: E1121 13:38:39.913041 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:40.413018258 +0000 UTC m=+158.390909516 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.938505 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5w6rn" event={"ID":"6a61c6a8-ff2f-47fd-aa02-04abddc2d607","Type":"ContainerStarted","Data":"d5e90a77c633550092d03eea10327027f708f20103d79357f78c528d8ba4cfbf"}
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.945641 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68" event={"ID":"f1072b74-e682-4755-ae65-af3f13ab5fe2","Type":"ContainerStarted","Data":"57c003d58cb2d288573d76e293c8f5bbdb028fb3fd1835951261372e7df43ae8"}
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.946547 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v"
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.949486 4721 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-nbq9v container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body=
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.949569 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v" podUID="02155afe-72cf-495e-92e5-3375bf701452" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused"
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.967303 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v" podStartSLOduration=126.967273798 podStartE2EDuration="2m6.967273798s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:39.966162573 +0000 UTC m=+157.944053841" watchObservedRunningTime="2025-11-21 13:38:39.967273798 +0000 UTC m=+157.945165066"
Nov 21 13:38:39 crc kubenswrapper[4721]: I1121 13:38:39.981169 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-p8rcl" podStartSLOduration=126.981140692 podStartE2EDuration="2m6.981140692s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:39.978654404 +0000 UTC m=+157.956545832" watchObservedRunningTime="2025-11-21 13:38:39.981140692 +0000 UTC m=+157.959031950"
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.007465 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pbtkt" podStartSLOduration=127.007443395 podStartE2EDuration="2m7.007443395s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:40.007026362 +0000 UTC m=+157.984917620" watchObservedRunningTime="2025-11-21 13:38:40.007443395 +0000 UTC m=+157.985334653"
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.015659 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:40 crc kubenswrapper[4721]: E1121 13:38:40.016159 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:40.516143108 +0000 UTC m=+158.494034366 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.117835 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:40 crc kubenswrapper[4721]: E1121 13:38:40.118239 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:40.618223147 +0000 UTC m=+158.596114405 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.203265 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg"
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.217968 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs" podStartSLOduration=127.217950041 podStartE2EDuration="2m7.217950041s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:40.025764519 +0000 UTC m=+158.003655777" watchObservedRunningTime="2025-11-21 13:38:40.217950041 +0000 UTC m=+158.195841299"
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.219596 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:40 crc kubenswrapper[4721]: E1121 13:38:40.219820 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:40.719791209 +0000 UTC m=+158.697682487 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.219973 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:40 crc kubenswrapper[4721]: E1121 13:38:40.220349 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:40.720334736 +0000 UTC m=+158.698226074 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.270525 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.271329 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.274726 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.274978 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.293688 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.321054 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:40 crc kubenswrapper[4721]: E1121 13:38:40.321555 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:40.821530607 +0000 UTC m=+158.799421875 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.388155 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-gjhwn"
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.388602 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-gjhwn"
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.390010 4721 patch_prober.go:28] interesting pod/console-f9d7485db-gjhwn container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.11:8443/health\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body=
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.390094 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-gjhwn" podUID="ac0251e7-5184-49b4-84ae-ab058632718c" containerName="console" probeResult="failure" output="Get \"https://10.217.0.11:8443/health\": dial tcp 10.217.0.11:8443: connect: connection refused"
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.422743 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.422801 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/293170e3-6ed8-4264-bb29-71c3b30fd882-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"293170e3-6ed8-4264-bb29-71c3b30fd882\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.422854 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/293170e3-6ed8-4264-bb29-71c3b30fd882-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"293170e3-6ed8-4264-bb29-71c3b30fd882\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 21 13:38:40 crc kubenswrapper[4721]: E1121 13:38:40.423219 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:40.923198273 +0000 UTC m=+158.901089601 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.531318 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:40 crc kubenswrapper[4721]: E1121 13:38:40.531598 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:41.031556408 +0000 UTC m=+159.009447676 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.531652 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.531717 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/293170e3-6ed8-4264-bb29-71c3b30fd882-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"293170e3-6ed8-4264-bb29-71c3b30fd882\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.531795 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/293170e3-6ed8-4264-bb29-71c3b30fd882-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"293170e3-6ed8-4264-bb29-71c3b30fd882\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.531902 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/293170e3-6ed8-4264-bb29-71c3b30fd882-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"293170e3-6ed8-4264-bb29-71c3b30fd882\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 21 13:38:40 crc kubenswrapper[4721]: E1121 13:38:40.532240 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8
podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:41.032214249 +0000 UTC m=+159.010105597 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.559544 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/293170e3-6ed8-4264-bb29-71c3b30fd882-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"293170e3-6ed8-4264-bb29-71c3b30fd882\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.598019 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.632711 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:40 crc kubenswrapper[4721]: E1121 13:38:40.632889 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:41.132856252 +0000 UTC m=+159.110747520 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.633048 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:40 crc kubenswrapper[4721]: E1121 13:38:40.633415 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:41.133403649 +0000 UTC m=+159.111294987 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.650048 4721 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-nbq9v container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body= Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.650142 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v" podUID="02155afe-72cf-495e-92e5-3375bf701452" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.650207 4721 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-nbq9v container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body= Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.650279 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v" podUID="02155afe-72cf-495e-92e5-3375bf701452" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.733640 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:40 crc kubenswrapper[4721]: E1121 13:38:40.734616 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:41.23459907 +0000 UTC m=+159.212490328 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.766160 4721 patch_prober.go:28] interesting pod/router-default-5444994796-rr29s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 21 13:38:40 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Nov 21 13:38:40 crc kubenswrapper[4721]: [+]process-running ok Nov 21 13:38:40 crc kubenswrapper[4721]: healthz check failed Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.766259 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rr29s" podUID="7cfe2940-b7fe-4556-882d-e413f1f2a270" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.835822 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:40 crc kubenswrapper[4721]: E1121 13:38:40.836358 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:41.336335108 +0000 UTC m=+159.314226366 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.849902 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 21 13:38:40 crc kubenswrapper[4721]: W1121 13:38:40.856025 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod293170e3_6ed8_4264_bb29_71c3b30fd882.slice/crio-18faebef826f2f5f9b1f73ae7bfe23dc08c8f3a4d42b5cccae934008d876973f WatchSource:0}: Error finding container 18faebef826f2f5f9b1f73ae7bfe23dc08c8f3a4d42b5cccae934008d876973f: Status 404 returned error can't find the container with id 18faebef826f2f5f9b1f73ae7bfe23dc08c8f3a4d42b5cccae934008d876973f Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.936935 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:40 crc kubenswrapper[4721]: E1121 13:38:40.937140 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:41.437095306 +0000 UTC m=+159.414986564 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.937697 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:40 crc kubenswrapper[4721]: E1121 13:38:40.938024 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:41.438009444 +0000 UTC m=+159.415900702 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.952270 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"293170e3-6ed8-4264-bb29-71c3b30fd882","Type":"ContainerStarted","Data":"18faebef826f2f5f9b1f73ae7bfe23dc08c8f3a4d42b5cccae934008d876973f"} Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.964488 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-jv2w5" event={"ID":"eecc36cc-4fa2-4d1d-99d5-fe4e5fb890c7","Type":"ContainerStarted","Data":"7c6932f177219efc57141307a9a4d710939b2cceed597c589fb2c3ac1dc31235"} Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.970684 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-pnmsb" event={"ID":"457ed397-53fe-415b-8d9c-855986ed1108","Type":"ContainerStarted","Data":"b89a6d01a09b611cb8cbc8233f149ba656cde8b2405c98a1bad812190476ffac"} Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.970847 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-pnmsb" Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.973437 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-r9lmq" event={"ID":"36924117-0998-45ef-8887-4a8aa7f5c392","Type":"ContainerStarted","Data":"a309f082465f2ead1de026e465268c69240cf68cb4446765a0f29cb428f202e2"} Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.974128 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-r9lmq" Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.975625 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pgrjc" event={"ID":"db1c61b3-a406-4b4c-970e-fa47d27a966a","Type":"ContainerStarted","Data":"ea0b6cd6aeeba8604a7a6c95cba76c125e7974cbd6483acb3f9c7f75ef8afbe1"} Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.977676 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vk9wg" event={"ID":"1d35c914-e539-4c91-87b0-df193d6ba649","Type":"ContainerStarted","Data":"b96da1897534c7e5dc30b7dba578b45acf00b331e314f4a5e78c2580ca6628b1"} Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.978668 4721 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-nbq9v container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body= Nov 21 13:38:40 crc kubenswrapper[4721]: I1121 13:38:40.978708 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v" podUID="02155afe-72cf-495e-92e5-3375bf701452" containerName="openshift-config-operator" probeResult="failure" 
output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.039397 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:41 crc kubenswrapper[4721]: E1121 13:38:41.039627 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:41.539591497 +0000 UTC m=+159.517482755 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.039685 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:41 crc kubenswrapper[4721]: E1121 13:38:41.048249 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:41.548211927 +0000 UTC m=+159.526103355 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.095083 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-jv2w5" podStartSLOduration=128.095041145 podStartE2EDuration="2m8.095041145s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:41.006935744 +0000 UTC m=+158.984826992" watchObservedRunningTime="2025-11-21 13:38:41.095041145 +0000 UTC m=+159.072932403" Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.097322 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-r9lmq" podStartSLOduration=128.097311456 podStartE2EDuration="2m8.097311456s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:41.09457411 +0000 UTC m=+159.072465378" watchObservedRunningTime="2025-11-21 13:38:41.097311456 +0000 UTC m=+159.075202714" Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.141644 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:41 crc kubenswrapper[4721]: E1121 13:38:41.143648 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:41.643621587 +0000 UTC m=+159.621512835 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.161125 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-pnmsb" podStartSLOduration=12.161095755 podStartE2EDuration="12.161095755s" podCreationTimestamp="2025-11-21 13:38:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:41.160882648 +0000 UTC m=+159.138773916" watchObservedRunningTime="2025-11-21 13:38:41.161095755 +0000 UTC m=+159.138987023" Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.192494 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-pgrjc" podStartSLOduration=128.192472288 podStartE2EDuration="2m8.192472288s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:41.180972197 +0000 UTC m=+159.158863455" watchObservedRunningTime="2025-11-21 13:38:41.192472288 +0000 UTC m=+159.170363546" Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.250768 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:41 crc kubenswrapper[4721]: E1121 13:38:41.251136 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:41.751118245 +0000 UTC m=+159.729009503 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.270728 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jrq68" podStartSLOduration=128.270699859 podStartE2EDuration="2m8.270699859s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:41.214105545 +0000 UTC m=+159.191996803" watchObservedRunningTime="2025-11-21 13:38:41.270699859 +0000 UTC m=+159.248591117" Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.351500 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:41 crc kubenswrapper[4721]: E1121 13:38:41.351944 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:41.851928514 +0000 UTC m=+159.829819762 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.453101 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:41 crc kubenswrapper[4721]: E1121 13:38:41.453499 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:41.953479576 +0000 UTC m=+159.931370834 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.554506 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:41 crc kubenswrapper[4721]: E1121 13:38:41.554718 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:42.054687137 +0000 UTC m=+160.032578415 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.554969 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:41 crc kubenswrapper[4721]: E1121 13:38:41.555270 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:42.055258585 +0000 UTC m=+160.033149843 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.624439 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-vvz5j container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.624516 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vvz5j" podUID="8b0cfc08-43fe-4026-b4d1-b3aae8c26fde" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.624441 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-vvz5j container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.624585 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-vvz5j" podUID="8b0cfc08-43fe-4026-b4d1-b3aae8c26fde" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.656072 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:41 crc kubenswrapper[4721]: E1121 13:38:41.656300 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:42.15626231 +0000 UTC m=+160.134153568 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.656431 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:41 crc kubenswrapper[4721]: E1121 13:38:41.656880 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:42.156870259 +0000 UTC m=+160.134761597 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.675351 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.737435 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-vk9wg" podStartSLOduration=128.737418913 podStartE2EDuration="2m8.737418913s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:41.2713777 +0000 UTC m=+159.249268958" watchObservedRunningTime="2025-11-21 13:38:41.737418913 +0000 UTC m=+159.715310171" Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.757525 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:41 crc kubenswrapper[4721]: E1121 13:38:41.757720 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:42.257693119 +0000 UTC m=+160.235584387 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.757868 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:41 crc kubenswrapper[4721]: E1121 13:38:41.758229 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:42.258217945 +0000 UTC m=+160.236109203 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.766088 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-rr29s" Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.779551 4721 patch_prober.go:28] interesting pod/router-default-5444994796-rr29s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 21 13:38:41 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Nov 21 13:38:41 crc kubenswrapper[4721]: [+]process-running ok Nov 21 13:38:41 crc kubenswrapper[4721]: healthz check failed Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.779622 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rr29s" podUID="7cfe2940-b7fe-4556-882d-e413f1f2a270" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.859606 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:41 crc kubenswrapper[4721]: E1121 13:38:41.860876 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:42.360857301 +0000 UTC m=+160.338748559 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.866129 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-qkfcx" Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.961120 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:41 crc kubenswrapper[4721]: E1121 13:38:41.961811 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:42.461791594 +0000 UTC m=+160.439682922 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.982122 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-78vlb" Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.984947 4721 generic.go:334] "Generic (PLEG): container finished" podID="293170e3-6ed8-4264-bb29-71c3b30fd882" containerID="77c9452098a5292b31e6230d2f2933889ed6f04a672d1430a038433da91dfc29" exitCode=0 Nov 21 13:38:41 crc kubenswrapper[4721]: I1121 13:38:41.985079 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"293170e3-6ed8-4264-bb29-71c3b30fd882","Type":"ContainerDied","Data":"77c9452098a5292b31e6230d2f2933889ed6f04a672d1430a038433da91dfc29"} Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.007847 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pgjvs" Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.062350 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:42 crc kubenswrapper[4721]: E1121 13:38:42.062567 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:42.562534741 +0000 UTC m=+160.540426019 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.062648 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:42 crc kubenswrapper[4721]: E1121 13:38:42.063040 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:42.563020376 +0000 UTC m=+160.540911634 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.164252 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:42 crc kubenswrapper[4721]: E1121 13:38:42.165383 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:42.665369243 +0000 UTC m=+160.643260501 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.230536 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9m9ck" Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.242354 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9m9ck" Nov 21 13:38:42 crc kubenswrapper[4721]: E1121 13:38:42.266231 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:42.766216673 +0000 UTC m=+160.744107921 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.266412 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.318948 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-26t29" Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.367390 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:42 crc kubenswrapper[4721]: E1121 13:38:42.368115 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:42.868099315 +0000 UTC m=+160.845990573 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.469651 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:42 crc kubenswrapper[4721]: E1121 13:38:42.470096 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:42.970080401 +0000 UTC m=+160.947971659 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.570988 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:42 crc kubenswrapper[4721]: E1121 13:38:42.571181 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:43.071153078 +0000 UTC m=+161.049044336 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.571382 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:42 crc kubenswrapper[4721]: E1121 13:38:42.571736 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:43.071724736 +0000 UTC m=+161.049615994 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.672159 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:42 crc kubenswrapper[4721]: E1121 13:38:42.672340 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:43.172297657 +0000 UTC m=+161.150188915 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.672730 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:42 crc kubenswrapper[4721]: E1121 13:38:42.673035 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:43.17302113 +0000 UTC m=+161.150912388 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.750656 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.751781 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 21 13:38:42 crc kubenswrapper[4721]: W1121 13:38:42.758131 4721 reflector.go:561] object-"openshift-kube-apiserver"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-kube-apiserver": no relationship found between node 'crc' and this object
Nov 21 13:38:42 crc kubenswrapper[4721]: E1121 13:38:42.758190 4721 reflector.go:158] "Unhandled Error" err="object-\"openshift-kube-apiserver\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-kube-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Nov 21 13:38:42 crc kubenswrapper[4721]: W1121 13:38:42.758207 4721 reflector.go:561] object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n": failed to list *v1.Secret: secrets "installer-sa-dockercfg-5pr6n" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-kube-apiserver": no relationship found between node 'crc' and this object
Nov 21 13:38:42 crc kubenswrapper[4721]: E1121 13:38:42.758250 4721 reflector.go:158] "Unhandled Error" err="object-\"openshift-kube-apiserver\"/\"installer-sa-dockercfg-5pr6n\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"installer-sa-dockercfg-5pr6n\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-kube-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.768469 4721 patch_prober.go:28] interesting pod/router-default-5444994796-rr29s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 21 13:38:42 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld
Nov 21 13:38:42 crc kubenswrapper[4721]: [+]process-running ok
Nov 21 13:38:42 crc kubenswrapper[4721]: healthz check failed
Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.768553 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rr29s" podUID="7cfe2940-b7fe-4556-882d-e413f1f2a270" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.773828 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:42 crc kubenswrapper[4721]: E1121 13:38:42.773937 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:43.273917882 +0000 UTC m=+161.251809140 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.774177 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:42 crc kubenswrapper[4721]: E1121 13:38:42.774429 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:43.274421827 +0000 UTC m=+161.252313085 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.804880 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.875947 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 13:38:42 crc kubenswrapper[4721]: E1121 13:38:42.876240 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:43.376187536 +0000 UTC m=+161.354078804 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.876332 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf"
Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.876433 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/61d80e6e-9ee4-48b1-81d3-a995d6cc9049-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"61d80e6e-9ee4-48b1-81d3-a995d6cc9049\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.876466 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/61d80e6e-9ee4-48b1-81d3-a995d6cc9049-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"61d80e6e-9ee4-48b1-81d3-a995d6cc9049\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 21 13:38:42 crc kubenswrapper[4721]: E1121 13:38:42.877131 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:43.377118945 +0000 UTC m=+161.355010413 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
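The repeating "No retries permitted until ... (durationBeforeRetry 500ms)" entries above show the kubelet's volume reconciler polling far more often than it is allowed to retry: each failure stamps a deadline, and attempts before that deadline are rejected. A simplified sketch of that gating, assuming a fixed 500ms backoff (kubelet's real nestedpendingoperations also grows the backoff exponentially):

```go
package main

import (
	"errors"
	"fmt"
	"sync/atomic"
	"time"
)

// pendingOp gates retries of one volume operation, like the
// "{volumeName:... podName:...}" keys in nestedpendingoperations.go above.
type pendingOp struct {
	lastError    error
	retryAfter   time.Time     // "No retries permitted until ..."
	retryBackoff time.Duration // durationBeforeRetry, 500ms in the log
}

func (p *pendingOp) tryRun(op func() error) error {
	if time.Now().Before(p.retryAfter) {
		return fmt.Errorf("no retries permitted until %s: %w", p.retryAfter, p.lastError)
	}
	if err := op(); err != nil {
		p.lastError = err
		p.retryAfter = time.Now().Add(p.retryBackoff)
		return err
	}
	return nil
}

func main() {
	mount := &pendingOp{retryBackoff: 500 * time.Millisecond}
	var registered atomic.Bool // stands in for the CSI driver registering later
	go func() { time.Sleep(1200 * time.Millisecond); registered.Store(true) }()

	for i := 0; i < 30; i++ { // stand-in for the reconciler's ~100ms loop
		err := mount.tryRun(func() error {
			if !registered.Load() {
				return errors.New("driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers")
			}
			return nil
		})
		if err == nil {
			fmt.Println("MountVolume succeeded")
			return
		}
		time.Sleep(100 * time.Millisecond)
	}
}
```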
\"kubernetes.io/host-path/61d80e6e-9ee4-48b1-81d3-a995d6cc9049-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"61d80e6e-9ee4-48b1-81d3-a995d6cc9049\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 21 13:38:42 crc kubenswrapper[4721]: E1121 13:38:42.980547 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:43.480529956 +0000 UTC m=+161.458421214 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.986207 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.988431 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.993584 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:38:42 crc kubenswrapper[4721]: I1121 13:38:42.998401 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.013207 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5w6rn" event={"ID":"6a61c6a8-ff2f-47fd-aa02-04abddc2d607","Type":"ContainerStarted","Data":"cff741097b29b996e713ec51895b736e79892687d5e12f40004495e55210d956"} Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.013246 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5w6rn" event={"ID":"6a61c6a8-ff2f-47fd-aa02-04abddc2d607","Type":"ContainerStarted","Data":"b7c4422777928cf7784cb151e92984ecc4455813d1404d130986bb6a5f548a03"} Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.079966 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:43 crc kubenswrapper[4721]: E1121 13:38:43.080337 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:43.580322593 +0000 UTC m=+161.558213851 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.117358 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.151937 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-l99jz"] Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.153307 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l99jz" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.157003 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.182040 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:43 crc kubenswrapper[4721]: E1121 13:38:43.183568 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:43.683546187 +0000 UTC m=+161.661437445 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.188496 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.248340 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l99jz"] Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.265150 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.283996 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cz7nf\" (UniqueName: \"kubernetes.io/projected/f22ea096-32f4-4972-a8c7-f43072dc8eef-kube-api-access-cz7nf\") pod \"certified-operators-l99jz\" (UID: \"f22ea096-32f4-4972-a8c7-f43072dc8eef\") " pod="openshift-marketplace/certified-operators-l99jz" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.284040 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f22ea096-32f4-4972-a8c7-f43072dc8eef-catalog-content\") pod \"certified-operators-l99jz\" (UID: \"f22ea096-32f4-4972-a8c7-f43072dc8eef\") " pod="openshift-marketplace/certified-operators-l99jz" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.284572 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.284603 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f22ea096-32f4-4972-a8c7-f43072dc8eef-utilities\") pod \"certified-operators-l99jz\" (UID: \"f22ea096-32f4-4972-a8c7-f43072dc8eef\") " pod="openshift-marketplace/certified-operators-l99jz" Nov 21 13:38:43 crc kubenswrapper[4721]: E1121 13:38:43.285054 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:43.785036897 +0000 UTC m=+161.762928155 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.349083 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hplx9"] Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.350182 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hplx9" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.355227 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hplx9"] Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.359417 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.388768 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.389245 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cz7nf\" (UniqueName: \"kubernetes.io/projected/f22ea096-32f4-4972-a8c7-f43072dc8eef-kube-api-access-cz7nf\") pod \"certified-operators-l99jz\" (UID: \"f22ea096-32f4-4972-a8c7-f43072dc8eef\") " pod="openshift-marketplace/certified-operators-l99jz" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.389289 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f22ea096-32f4-4972-a8c7-f43072dc8eef-catalog-content\") pod \"certified-operators-l99jz\" (UID: \"f22ea096-32f4-4972-a8c7-f43072dc8eef\") " pod="openshift-marketplace/certified-operators-l99jz" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.389342 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f22ea096-32f4-4972-a8c7-f43072dc8eef-utilities\") pod \"certified-operators-l99jz\" (UID: \"f22ea096-32f4-4972-a8c7-f43072dc8eef\") " pod="openshift-marketplace/certified-operators-l99jz" Nov 21 13:38:43 crc kubenswrapper[4721]: E1121 13:38:43.394166 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:43.894020402 +0000 UTC m=+161.871911650 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.394540 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f22ea096-32f4-4972-a8c7-f43072dc8eef-catalog-content\") pod \"certified-operators-l99jz\" (UID: \"f22ea096-32f4-4972-a8c7-f43072dc8eef\") " pod="openshift-marketplace/certified-operators-l99jz" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.395694 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f22ea096-32f4-4972-a8c7-f43072dc8eef-utilities\") pod \"certified-operators-l99jz\" (UID: \"f22ea096-32f4-4972-a8c7-f43072dc8eef\") " pod="openshift-marketplace/certified-operators-l99jz" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.426851 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cz7nf\" (UniqueName: \"kubernetes.io/projected/f22ea096-32f4-4972-a8c7-f43072dc8eef-kube-api-access-cz7nf\") pod \"certified-operators-l99jz\" (UID: \"f22ea096-32f4-4972-a8c7-f43072dc8eef\") " pod="openshift-marketplace/certified-operators-l99jz" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.485187 4721 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.492772 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.492901 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2-catalog-content\") pod \"community-operators-hplx9\" (UID: \"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2\") " pod="openshift-marketplace/community-operators-hplx9" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.492941 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2-utilities\") pod \"community-operators-hplx9\" (UID: \"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2\") " pod="openshift-marketplace/community-operators-hplx9" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.492999 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctvz6\" (UniqueName: \"kubernetes.io/projected/136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2-kube-api-access-ctvz6\") pod \"community-operators-hplx9\" (UID: \"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2\") " 
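The plugin_watcher entry above is the turning point for all the CSI failures: the driver's registration socket appears under /var/lib/kubelet/plugins_registry/, kubelet calls its registration API, and mounts can then proceed. A hedged sketch of what a registrar sidecar serves on that socket, using the kubelet plugin-registration gRPC API; the driver name, socket paths, and version are taken from the log, but the code is illustrative (the real hostpath provisioner ships its own registrar), and the generated API details are stated from memory:

```go
package main

import (
	"context"
	"net"

	"google.golang.org/grpc"
	registerapi "k8s.io/kubelet/pkg/apis/pluginregistration/v1"
)

type registrar struct{}

// GetInfo tells kubelet what kind of plugin this is and where its CSI socket
// lives; kubelet logs this as "Trying to validate a new CSI Driver ...".
func (registrar) GetInfo(ctx context.Context, req *registerapi.InfoRequest) (*registerapi.PluginInfo, error) {
	return &registerapi.PluginInfo{
		Type:              registerapi.CSIPlugin,
		Name:              "kubevirt.io.hostpath-provisioner",
		Endpoint:          "/var/lib/kubelet/plugins/csi-hostpath/csi.sock",
		SupportedVersions: []string{"1.0.0"},
	}, nil
}

// NotifyRegistrationStatus is kubelet's callback after validation succeeds or fails.
func (registrar) NotifyRegistrationStatus(ctx context.Context, status *registerapi.RegistrationStatus) (*registerapi.RegistrationStatusResponse, error) {
	return &registerapi.RegistrationStatusResponse{}, nil
}

func main() {
	// The socket name matches the "Adding socket path ..." entry above.
	l, err := net.Listen("unix", "/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock")
	if err != nil {
		panic(err)
	}
	s := grpc.NewServer()
	registerapi.RegisterRegistrationServer(s, registrar{})
	if err := s.Serve(l); err != nil {
		panic(err)
	}
}
```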
pod="openshift-marketplace/community-operators-hplx9" Nov 21 13:38:43 crc kubenswrapper[4721]: E1121 13:38:43.493633 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:43.993607003 +0000 UTC m=+161.971498261 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.497147 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l99jz" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.540722 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-sghnf"] Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.542764 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sghnf" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.548130 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sghnf"] Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.594056 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.596010 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.596395 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2-catalog-content\") pod \"community-operators-hplx9\" (UID: \"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2\") " pod="openshift-marketplace/community-operators-hplx9" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.596486 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2-utilities\") pod \"community-operators-hplx9\" (UID: \"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2\") " pod="openshift-marketplace/community-operators-hplx9" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.596575 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctvz6\" (UniqueName: \"kubernetes.io/projected/136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2-kube-api-access-ctvz6\") pod \"community-operators-hplx9\" (UID: \"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2\") " pod="openshift-marketplace/community-operators-hplx9" Nov 21 13:38:43 crc kubenswrapper[4721]: E1121 13:38:43.596899 4721 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:44.09664034 +0000 UTC m=+162.074531598 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.597030 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2-utilities\") pod \"community-operators-hplx9\" (UID: \"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2\") " pod="openshift-marketplace/community-operators-hplx9" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.597172 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2-catalog-content\") pod \"community-operators-hplx9\" (UID: \"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2\") " pod="openshift-marketplace/community-operators-hplx9" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.619858 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctvz6\" (UniqueName: \"kubernetes.io/projected/136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2-kube-api-access-ctvz6\") pod \"community-operators-hplx9\" (UID: \"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2\") " pod="openshift-marketplace/community-operators-hplx9" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.653234 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-nbq9v" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.699796 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/293170e3-6ed8-4264-bb29-71c3b30fd882-kubelet-dir\") pod \"293170e3-6ed8-4264-bb29-71c3b30fd882\" (UID: \"293170e3-6ed8-4264-bb29-71c3b30fd882\") " Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.699954 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/293170e3-6ed8-4264-bb29-71c3b30fd882-kube-api-access\") pod \"293170e3-6ed8-4264-bb29-71c3b30fd882\" (UID: \"293170e3-6ed8-4264-bb29-71c3b30fd882\") " Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.700343 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6bh5\" (UniqueName: \"kubernetes.io/projected/d3fcd3bc-f437-4602-b092-0c9155559334-kube-api-access-c6bh5\") pod \"certified-operators-sghnf\" (UID: \"d3fcd3bc-f437-4602-b092-0c9155559334\") " pod="openshift-marketplace/certified-operators-sghnf" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.700391 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.700412 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3fcd3bc-f437-4602-b092-0c9155559334-catalog-content\") pod \"certified-operators-sghnf\" (UID: \"d3fcd3bc-f437-4602-b092-0c9155559334\") " pod="openshift-marketplace/certified-operators-sghnf" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.700485 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3fcd3bc-f437-4602-b092-0c9155559334-utilities\") pod \"certified-operators-sghnf\" (UID: \"d3fcd3bc-f437-4602-b092-0c9155559334\") " pod="openshift-marketplace/certified-operators-sghnf" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.700599 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/293170e3-6ed8-4264-bb29-71c3b30fd882-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "293170e3-6ed8-4264-bb29-71c3b30fd882" (UID: "293170e3-6ed8-4264-bb29-71c3b30fd882"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 13:38:43 crc kubenswrapper[4721]: E1121 13:38:43.703159 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:44.203139037 +0000 UTC m=+162.181030295 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.712878 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/293170e3-6ed8-4264-bb29-71c3b30fd882-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "293170e3-6ed8-4264-bb29-71c3b30fd882" (UID: "293170e3-6ed8-4264-bb29-71c3b30fd882"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:38:43 crc kubenswrapper[4721]: W1121 13:38:43.727196 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-84d2a2d3f1acd7e68c4d58bb421acca712bf444c9f302b94be2ef38ba0e410af WatchSource:0}: Error finding container 84d2a2d3f1acd7e68c4d58bb421acca712bf444c9f302b94be2ef38ba0e410af: Status 404 returned error can't find the container with id 84d2a2d3f1acd7e68c4d58bb421acca712bf444c9f302b94be2ef38ba0e410af Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.745934 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lz5k6"] Nov 21 13:38:43 crc kubenswrapper[4721]: E1121 13:38:43.746241 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="293170e3-6ed8-4264-bb29-71c3b30fd882" containerName="pruner" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.746262 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="293170e3-6ed8-4264-bb29-71c3b30fd882" containerName="pruner" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.746394 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="293170e3-6ed8-4264-bb29-71c3b30fd882" containerName="pruner" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.747377 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lz5k6" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.748826 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hplx9" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.749530 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lz5k6"] Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.774979 4721 patch_prober.go:28] interesting pod/router-default-5444994796-rr29s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 21 13:38:43 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Nov 21 13:38:43 crc kubenswrapper[4721]: [+]process-running ok Nov 21 13:38:43 crc kubenswrapper[4721]: healthz check failed Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.775038 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rr29s" podUID="7cfe2940-b7fe-4556-882d-e413f1f2a270" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.801741 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:43 crc kubenswrapper[4721]: E1121 13:38:43.801921 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 13:38:44.301893142 +0000 UTC m=+162.279784400 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.802000 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.802032 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3fcd3bc-f437-4602-b092-0c9155559334-catalog-content\") pod \"certified-operators-sghnf\" (UID: \"d3fcd3bc-f437-4602-b092-0c9155559334\") " pod="openshift-marketplace/certified-operators-sghnf" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.802123 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3fcd3bc-f437-4602-b092-0c9155559334-utilities\") pod \"certified-operators-sghnf\" (UID: \"d3fcd3bc-f437-4602-b092-0c9155559334\") " pod="openshift-marketplace/certified-operators-sghnf" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.802191 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6bh5\" (UniqueName: \"kubernetes.io/projected/d3fcd3bc-f437-4602-b092-0c9155559334-kube-api-access-c6bh5\") pod \"certified-operators-sghnf\" (UID: \"d3fcd3bc-f437-4602-b092-0c9155559334\") " pod="openshift-marketplace/certified-operators-sghnf" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.802229 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/293170e3-6ed8-4264-bb29-71c3b30fd882-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.802240 4721 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/293170e3-6ed8-4264-bb29-71c3b30fd882-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 21 13:38:43 crc kubenswrapper[4721]: E1121 13:38:43.802418 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 13:38:44.302410908 +0000 UTC m=+162.280302166 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-crzdf" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.803147 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3fcd3bc-f437-4602-b092-0c9155559334-catalog-content\") pod \"certified-operators-sghnf\" (UID: \"d3fcd3bc-f437-4602-b092-0c9155559334\") " pod="openshift-marketplace/certified-operators-sghnf" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.803209 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3fcd3bc-f437-4602-b092-0c9155559334-utilities\") pod \"certified-operators-sghnf\" (UID: \"d3fcd3bc-f437-4602-b092-0c9155559334\") " pod="openshift-marketplace/certified-operators-sghnf" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.812701 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.824465 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6bh5\" (UniqueName: \"kubernetes.io/projected/d3fcd3bc-f437-4602-b092-0c9155559334-kube-api-access-c6bh5\") pod \"certified-operators-sghnf\" (UID: \"d3fcd3bc-f437-4602-b092-0c9155559334\") " pod="openshift-marketplace/certified-operators-sghnf" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.882919 4721 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-11-21T13:38:43.48522022Z","Handler":null,"Name":""} Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.904243 4721 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.904288 4721 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.904570 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.904912 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dpmt\" (UniqueName: \"kubernetes.io/projected/89ba540a-2435-4633-9e07-97311b914cbd-kube-api-access-7dpmt\") pod \"community-operators-lz5k6\" (UID: \"89ba540a-2435-4633-9e07-97311b914cbd\") " pod="openshift-marketplace/community-operators-lz5k6" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.905023 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89ba540a-2435-4633-9e07-97311b914cbd-catalog-content\") pod \"community-operators-lz5k6\" (UID: \"89ba540a-2435-4633-9e07-97311b914cbd\") " pod="openshift-marketplace/community-operators-lz5k6" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.905172 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89ba540a-2435-4633-9e07-97311b914cbd-utilities\") pod \"community-operators-lz5k6\" (UID: \"89ba540a-2435-4633-9e07-97311b914cbd\") " pod="openshift-marketplace/community-operators-lz5k6" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.905416 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sghnf" Nov 21 13:38:43 crc kubenswrapper[4721]: I1121 13:38:43.918574 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.010485 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.010540 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dpmt\" (UniqueName: \"kubernetes.io/projected/89ba540a-2435-4633-9e07-97311b914cbd-kube-api-access-7dpmt\") pod \"community-operators-lz5k6\" (UID: \"89ba540a-2435-4633-9e07-97311b914cbd\") " pod="openshift-marketplace/community-operators-lz5k6" Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.010589 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89ba540a-2435-4633-9e07-97311b914cbd-catalog-content\") pod \"community-operators-lz5k6\" (UID: \"89ba540a-2435-4633-9e07-97311b914cbd\") " pod="openshift-marketplace/community-operators-lz5k6" Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.010735 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89ba540a-2435-4633-9e07-97311b914cbd-utilities\") pod \"community-operators-lz5k6\" (UID: \"89ba540a-2435-4633-9e07-97311b914cbd\") " pod="openshift-marketplace/community-operators-lz5k6" Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.011335 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89ba540a-2435-4633-9e07-97311b914cbd-utilities\") pod \"community-operators-lz5k6\" (UID: \"89ba540a-2435-4633-9e07-97311b914cbd\") " pod="openshift-marketplace/community-operators-lz5k6" Nov 21 13:38:44 crc kubenswrapper[4721]: E1121 13:38:44.012438 4721 projected.go:288] Couldn't get configMap 
openshift-kube-apiserver/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Nov 21 13:38:44 crc kubenswrapper[4721]: E1121 13:38:44.012463 4721 projected.go:194] Error preparing data for projected volume kube-api-access for pod openshift-kube-apiserver/revision-pruner-8-crc: failed to sync configmap cache: timed out waiting for the condition Nov 21 13:38:44 crc kubenswrapper[4721]: E1121 13:38:44.012498 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/61d80e6e-9ee4-48b1-81d3-a995d6cc9049-kube-api-access podName:61d80e6e-9ee4-48b1-81d3-a995d6cc9049 nodeName:}" failed. No retries permitted until 2025-11-21 13:38:44.512487031 +0000 UTC m=+162.490378289 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access" (UniqueName: "kubernetes.io/projected/61d80e6e-9ee4-48b1-81d3-a995d6cc9049-kube-api-access") pod "revision-pruner-8-crc" (UID: "61d80e6e-9ee4-48b1-81d3-a995d6cc9049") : failed to sync configmap cache: timed out waiting for the condition Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.012766 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89ba540a-2435-4633-9e07-97311b914cbd-catalog-content\") pod \"community-operators-lz5k6\" (UID: \"89ba540a-2435-4633-9e07-97311b914cbd\") " pod="openshift-marketplace/community-operators-lz5k6" Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.017174 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.017480 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.033984 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dpmt\" (UniqueName: \"kubernetes.io/projected/89ba540a-2435-4633-9e07-97311b914cbd-kube-api-access-7dpmt\") pod \"community-operators-lz5k6\" (UID: \"89ba540a-2435-4633-9e07-97311b914cbd\") " pod="openshift-marketplace/community-operators-lz5k6" Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.039117 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"38efa67651e6db35d43ca72505361b387c0372560c03432f4e2d186ff7f8e673"} Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.039158 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"f5548e3cdc786a60df6e6f3dcaeab17058e5d4534751e7d1064f03fa265a37c9"} Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.052459 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" 
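The projected-volume failure just above ("failed to sync configmap cache: timed out waiting for the condition") is the wait-for-informer-sync pattern timing out while the node authorizer still denied the ConfigMap list; once the reflector logs "Caches populated for *v1.ConfigMap" further down, the 500ms retry succeeds. A hedged, self-contained sketch of that pattern with client-go (kubeconfig loading and names taken from the log; this is not kubelet's own code path):

```go
package main

import (
	"context"
	"fmt"
	"time"

	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	// Namespace-scoped ConfigMap informer, like the per-namespace caches kubelet keeps.
	factory := informers.NewSharedInformerFactoryWithOptions(
		client, 0, informers.WithNamespace("openshift-kube-apiserver"))
	cmInformer := factory.Core().V1().ConfigMaps()

	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()
	factory.Start(ctx.Done())

	// The step that produces "timed out waiting for the condition" when the
	// list/watch is still forbidden; it returns true once caches are populated.
	if !cache.WaitForCacheSync(ctx.Done(), cmInformer.Informer().HasSynced) {
		fmt.Println("failed to sync configmap cache: timed out waiting for the condition")
		return
	}
	cm, err := cmInformer.Lister().ConfigMaps("openshift-kube-apiserver").Get("kube-root-ca.crt")
	fmt.Println(cm.Name, err)
}
```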
event={"ID":"293170e3-6ed8-4264-bb29-71c3b30fd882","Type":"ContainerDied","Data":"18faebef826f2f5f9b1f73ae7bfe23dc08c8f3a4d42b5cccae934008d876973f"} Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.052503 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18faebef826f2f5f9b1f73ae7bfe23dc08c8f3a4d42b5cccae934008d876973f" Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.052576 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.067982 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lz5k6" Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.075016 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"bf312368b89b9a3abe3f00d28ed37232caa6ebdd61d1fcba861579e8c30701c1"} Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.075170 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"684276a018eb5137f963573a71a24a06899b24729327077ef04ea1c3badf038a"} Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.088498 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l99jz"] Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.114997 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hplx9"] Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.116124 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5w6rn" event={"ID":"6a61c6a8-ff2f-47fd-aa02-04abddc2d607","Type":"ContainerStarted","Data":"2829d31cf412b6c09afb967508d89a1703fa1c9e20fc20c0e56cea920d397966"} Nov 21 13:38:44 crc kubenswrapper[4721]: W1121 13:38:44.126106 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod136f4c7e_7cb7_4c2f_8bec_e7a9d8f844f2.slice/crio-2fee2cbb68ddc91b648c8027f0fba55b0846baf9835b12ac358033492de763ca WatchSource:0}: Error finding container 2fee2cbb68ddc91b648c8027f0fba55b0846baf9835b12ac358033492de763ca: Status 404 returned error can't find the container with id 2fee2cbb68ddc91b648c8027f0fba55b0846baf9835b12ac358033492de763ca Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.126382 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"84d2a2d3f1acd7e68c4d58bb421acca712bf444c9f302b94be2ef38ba0e410af"} Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.128440 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.152820 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-5w6rn" podStartSLOduration=16.152797527 podStartE2EDuration="16.152797527s" podCreationTimestamp="2025-11-21 13:38:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:44.150867407 +0000 UTC m=+162.128758665" watchObservedRunningTime="2025-11-21 13:38:44.152797527 +0000 UTC m=+162.130688785" Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.198293 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-crzdf\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.236192 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.321293 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.522639 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/61d80e6e-9ee4-48b1-81d3-a995d6cc9049-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"61d80e6e-9ee4-48b1-81d3-a995d6cc9049\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.529663 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sghnf"] Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.552217 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lz5k6"] Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.553284 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/61d80e6e-9ee4-48b1-81d3-a995d6cc9049-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"61d80e6e-9ee4-48b1-81d3-a995d6cc9049\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.565307 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.622136 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-crzdf"] Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.769483 4721 patch_prober.go:28] interesting pod/router-default-5444994796-rr29s container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 21 13:38:44 crc kubenswrapper[4721]: [-]has-synced failed: reason withheld Nov 21 13:38:44 crc kubenswrapper[4721]: [+]process-running ok Nov 21 13:38:44 crc kubenswrapper[4721]: healthz check failed Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.769813 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rr29s" podUID="7cfe2940-b7fe-4556-882d-e413f1f2a270" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 21 13:38:44 crc kubenswrapper[4721]: I1121 13:38:44.989052 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.119020 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.126149 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tfkj9"] Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.127145 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tfkj9" Nov 21 13:38:45 crc kubenswrapper[4721]: W1121 13:38:45.129439 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod61d80e6e_9ee4_48b1_81d3_a995d6cc9049.slice/crio-b27d68036164b8a255a556ae6ae93a4d190bb9cf27cf675ac2603b6e0bc84853 WatchSource:0}: Error finding container b27d68036164b8a255a556ae6ae93a4d190bb9cf27cf675ac2603b6e0bc84853: Status 404 returned error can't find the container with id b27d68036164b8a255a556ae6ae93a4d190bb9cf27cf675ac2603b6e0bc84853 Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.143471 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" event={"ID":"b6ae0efb-0b79-4944-9dbb-44da8a370450","Type":"ContainerStarted","Data":"bee9ee06fedd88f3846e0e945039f0ab9f4809f204bb87c8dbc1e9d18ad2b5ee"} Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.143551 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" event={"ID":"b6ae0efb-0b79-4944-9dbb-44da8a370450","Type":"ContainerStarted","Data":"670e2f679d865a9de5c41d30e42978d0fb9cb64ac31db2343489d3f18ae96308"} Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.144120 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.154194 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.156872 4721 generic.go:334] "Generic (PLEG): container finished" podID="d3fcd3bc-f437-4602-b092-0c9155559334" containerID="1779aace10bbd85a24fefb9ddfb6251ff8a69a62a2c33a8d7440393a7417e1ce" exitCode=0 Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.156964 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sghnf" event={"ID":"d3fcd3bc-f437-4602-b092-0c9155559334","Type":"ContainerDied","Data":"1779aace10bbd85a24fefb9ddfb6251ff8a69a62a2c33a8d7440393a7417e1ce"} Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.157000 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sghnf" event={"ID":"d3fcd3bc-f437-4602-b092-0c9155559334","Type":"ContainerStarted","Data":"650df45a8cd14fbed6e61e83a3c730ecd755dff8cf17f4957ba628f860a1b3fe"} Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.163583 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.169342 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tfkj9"] Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.177744 4721 generic.go:334] "Generic (PLEG): container finished" podID="89ba540a-2435-4633-9e07-97311b914cbd" containerID="5de34f2dd76b5a90a53688b17313e7b18e9472ac3e9ee7fd92d6c55e492c8eb8" exitCode=0 Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.177939 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lz5k6" event={"ID":"89ba540a-2435-4633-9e07-97311b914cbd","Type":"ContainerDied","Data":"5de34f2dd76b5a90a53688b17313e7b18e9472ac3e9ee7fd92d6c55e492c8eb8"} Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.178165 4721 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/community-operators-lz5k6" event={"ID":"89ba540a-2435-4633-9e07-97311b914cbd","Type":"ContainerStarted","Data":"aa15f010c703275e662e6c1a2070687745d87b048f70b5309004f80866fdad08"} Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.200413 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"b96d0f3bb695f5e8fb2323a0d575af25ae3e127c26a28e6cdeb24cf58e0e291e"} Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.221325 4721 generic.go:334] "Generic (PLEG): container finished" podID="f22ea096-32f4-4972-a8c7-f43072dc8eef" containerID="595b8d1069ffe31829a2bdebbcd67f2c0299151d4a956a3c0fe3434eb61490f0" exitCode=0 Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.221420 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l99jz" event={"ID":"f22ea096-32f4-4972-a8c7-f43072dc8eef","Type":"ContainerDied","Data":"595b8d1069ffe31829a2bdebbcd67f2c0299151d4a956a3c0fe3434eb61490f0"} Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.221447 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l99jz" event={"ID":"f22ea096-32f4-4972-a8c7-f43072dc8eef","Type":"ContainerStarted","Data":"66122afeed6d325ce874fe76545eacbd68c8e86217ef17db7f34af950cae8fea"} Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.246139 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzvcq\" (UniqueName: \"kubernetes.io/projected/607645c8-0edd-40d0-959a-5cd14dd67a56-kube-api-access-zzvcq\") pod \"redhat-marketplace-tfkj9\" (UID: \"607645c8-0edd-40d0-959a-5cd14dd67a56\") " pod="openshift-marketplace/redhat-marketplace-tfkj9" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.246191 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/607645c8-0edd-40d0-959a-5cd14dd67a56-utilities\") pod \"redhat-marketplace-tfkj9\" (UID: \"607645c8-0edd-40d0-959a-5cd14dd67a56\") " pod="openshift-marketplace/redhat-marketplace-tfkj9" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.246262 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/607645c8-0edd-40d0-959a-5cd14dd67a56-catalog-content\") pod \"redhat-marketplace-tfkj9\" (UID: \"607645c8-0edd-40d0-959a-5cd14dd67a56\") " pod="openshift-marketplace/redhat-marketplace-tfkj9" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.248187 4721 generic.go:334] "Generic (PLEG): container finished" podID="8bd725df-b6b1-4de7-a77a-917a4f592b92" containerID="89ad936f9f2d3995e92789f5dbe5486dd791bb9a77d3cbd01c7872ebd7b26fef" exitCode=0 Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.248289 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs" event={"ID":"8bd725df-b6b1-4de7-a77a-917a4f592b92","Type":"ContainerDied","Data":"89ad936f9f2d3995e92789f5dbe5486dd791bb9a77d3cbd01c7872ebd7b26fef"} Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.291418 4721 generic.go:334] "Generic (PLEG): container finished" podID="136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2" containerID="9422ebbca8b7aa35809f2cf38bd928d435baefe44175138e8af5dd96d79c7d6a" exitCode=0 Nov 
21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.292518 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hplx9" event={"ID":"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2","Type":"ContainerDied","Data":"9422ebbca8b7aa35809f2cf38bd928d435baefe44175138e8af5dd96d79c7d6a"} Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.292555 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hplx9" event={"ID":"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2","Type":"ContainerStarted","Data":"2fee2cbb68ddc91b648c8027f0fba55b0846baf9835b12ac358033492de763ca"} Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.303724 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" podStartSLOduration=132.303707 podStartE2EDuration="2m12.303707s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:38:45.269010473 +0000 UTC m=+163.246901731" watchObservedRunningTime="2025-11-21 13:38:45.303707 +0000 UTC m=+163.281598258" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.348248 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzvcq\" (UniqueName: \"kubernetes.io/projected/607645c8-0edd-40d0-959a-5cd14dd67a56-kube-api-access-zzvcq\") pod \"redhat-marketplace-tfkj9\" (UID: \"607645c8-0edd-40d0-959a-5cd14dd67a56\") " pod="openshift-marketplace/redhat-marketplace-tfkj9" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.348305 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/607645c8-0edd-40d0-959a-5cd14dd67a56-utilities\") pod \"redhat-marketplace-tfkj9\" (UID: \"607645c8-0edd-40d0-959a-5cd14dd67a56\") " pod="openshift-marketplace/redhat-marketplace-tfkj9" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.348365 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/607645c8-0edd-40d0-959a-5cd14dd67a56-catalog-content\") pod \"redhat-marketplace-tfkj9\" (UID: \"607645c8-0edd-40d0-959a-5cd14dd67a56\") " pod="openshift-marketplace/redhat-marketplace-tfkj9" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.348792 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/607645c8-0edd-40d0-959a-5cd14dd67a56-utilities\") pod \"redhat-marketplace-tfkj9\" (UID: \"607645c8-0edd-40d0-959a-5cd14dd67a56\") " pod="openshift-marketplace/redhat-marketplace-tfkj9" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.349108 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/607645c8-0edd-40d0-959a-5cd14dd67a56-catalog-content\") pod \"redhat-marketplace-tfkj9\" (UID: \"607645c8-0edd-40d0-959a-5cd14dd67a56\") " pod="openshift-marketplace/redhat-marketplace-tfkj9" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.392496 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzvcq\" (UniqueName: \"kubernetes.io/projected/607645c8-0edd-40d0-959a-5cd14dd67a56-kube-api-access-zzvcq\") pod \"redhat-marketplace-tfkj9\" (UID: \"607645c8-0edd-40d0-959a-5cd14dd67a56\") " 
pod="openshift-marketplace/redhat-marketplace-tfkj9" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.450904 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tfkj9" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.524415 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-sqtvx"] Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.526074 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sqtvx" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.537780 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sqtvx"] Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.652750 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f68146a4-d5d0-41b3-b55a-53fba1ed4a31-catalog-content\") pod \"redhat-marketplace-sqtvx\" (UID: \"f68146a4-d5d0-41b3-b55a-53fba1ed4a31\") " pod="openshift-marketplace/redhat-marketplace-sqtvx" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.652792 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f68146a4-d5d0-41b3-b55a-53fba1ed4a31-utilities\") pod \"redhat-marketplace-sqtvx\" (UID: \"f68146a4-d5d0-41b3-b55a-53fba1ed4a31\") " pod="openshift-marketplace/redhat-marketplace-sqtvx" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.652812 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqwgl\" (UniqueName: \"kubernetes.io/projected/f68146a4-d5d0-41b3-b55a-53fba1ed4a31-kube-api-access-kqwgl\") pod \"redhat-marketplace-sqtvx\" (UID: \"f68146a4-d5d0-41b3-b55a-53fba1ed4a31\") " pod="openshift-marketplace/redhat-marketplace-sqtvx" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.689166 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tfkj9"] Nov 21 13:38:45 crc kubenswrapper[4721]: W1121 13:38:45.703705 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod607645c8_0edd_40d0_959a_5cd14dd67a56.slice/crio-def5e761496f9fd41a686baedf63e7f7ad00f288f6179fab5ffa7027218a7b29 WatchSource:0}: Error finding container def5e761496f9fd41a686baedf63e7f7ad00f288f6179fab5ffa7027218a7b29: Status 404 returned error can't find the container with id def5e761496f9fd41a686baedf63e7f7ad00f288f6179fab5ffa7027218a7b29 Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.755110 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f68146a4-d5d0-41b3-b55a-53fba1ed4a31-catalog-content\") pod \"redhat-marketplace-sqtvx\" (UID: \"f68146a4-d5d0-41b3-b55a-53fba1ed4a31\") " pod="openshift-marketplace/redhat-marketplace-sqtvx" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.755173 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f68146a4-d5d0-41b3-b55a-53fba1ed4a31-utilities\") pod \"redhat-marketplace-sqtvx\" (UID: \"f68146a4-d5d0-41b3-b55a-53fba1ed4a31\") " pod="openshift-marketplace/redhat-marketplace-sqtvx" Nov 21 13:38:45 crc kubenswrapper[4721]: 
I1121 13:38:45.755210 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqwgl\" (UniqueName: \"kubernetes.io/projected/f68146a4-d5d0-41b3-b55a-53fba1ed4a31-kube-api-access-kqwgl\") pod \"redhat-marketplace-sqtvx\" (UID: \"f68146a4-d5d0-41b3-b55a-53fba1ed4a31\") " pod="openshift-marketplace/redhat-marketplace-sqtvx" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.755648 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f68146a4-d5d0-41b3-b55a-53fba1ed4a31-catalog-content\") pod \"redhat-marketplace-sqtvx\" (UID: \"f68146a4-d5d0-41b3-b55a-53fba1ed4a31\") " pod="openshift-marketplace/redhat-marketplace-sqtvx" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.755719 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f68146a4-d5d0-41b3-b55a-53fba1ed4a31-utilities\") pod \"redhat-marketplace-sqtvx\" (UID: \"f68146a4-d5d0-41b3-b55a-53fba1ed4a31\") " pod="openshift-marketplace/redhat-marketplace-sqtvx" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.764859 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-rr29s" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.767196 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-rr29s" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.778883 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqwgl\" (UniqueName: \"kubernetes.io/projected/f68146a4-d5d0-41b3-b55a-53fba1ed4a31-kube-api-access-kqwgl\") pod \"redhat-marketplace-sqtvx\" (UID: \"f68146a4-d5d0-41b3-b55a-53fba1ed4a31\") " pod="openshift-marketplace/redhat-marketplace-sqtvx" Nov 21 13:38:45 crc kubenswrapper[4721]: I1121 13:38:45.843341 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sqtvx" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.073785 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sqtvx"] Nov 21 13:38:46 crc kubenswrapper[4721]: W1121 13:38:46.078654 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf68146a4_d5d0_41b3_b55a_53fba1ed4a31.slice/crio-1dcc716773b87c018866fdb763e66e89939ffe9b116a5b8405da4a40e472fce0 WatchSource:0}: Error finding container 1dcc716773b87c018866fdb763e66e89939ffe9b116a5b8405da4a40e472fce0: Status 404 returned error can't find the container with id 1dcc716773b87c018866fdb763e66e89939ffe9b116a5b8405da4a40e472fce0 Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.297402 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tfkj9" event={"ID":"607645c8-0edd-40d0-959a-5cd14dd67a56","Type":"ContainerStarted","Data":"def5e761496f9fd41a686baedf63e7f7ad00f288f6179fab5ffa7027218a7b29"} Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.298989 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sqtvx" event={"ID":"f68146a4-d5d0-41b3-b55a-53fba1ed4a31","Type":"ContainerStarted","Data":"1dcc716773b87c018866fdb763e66e89939ffe9b116a5b8405da4a40e472fce0"} Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.300298 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"61d80e6e-9ee4-48b1-81d3-a995d6cc9049","Type":"ContainerStarted","Data":"b27d68036164b8a255a556ae6ae93a4d190bb9cf27cf675ac2603b6e0bc84853"} Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.335361 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-z55jt"] Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.336662 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z55jt" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.339571 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.344799 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z55jt"] Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.465828 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317-utilities\") pod \"redhat-operators-z55jt\" (UID: \"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317\") " pod="openshift-marketplace/redhat-operators-z55jt" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.465885 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5m99\" (UniqueName: \"kubernetes.io/projected/0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317-kube-api-access-p5m99\") pod \"redhat-operators-z55jt\" (UID: \"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317\") " pod="openshift-marketplace/redhat-operators-z55jt" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.465915 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317-catalog-content\") pod \"redhat-operators-z55jt\" (UID: \"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317\") " pod="openshift-marketplace/redhat-operators-z55jt" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.567210 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317-utilities\") pod \"redhat-operators-z55jt\" (UID: \"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317\") " pod="openshift-marketplace/redhat-operators-z55jt" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.567531 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5m99\" (UniqueName: \"kubernetes.io/projected/0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317-kube-api-access-p5m99\") pod \"redhat-operators-z55jt\" (UID: \"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317\") " pod="openshift-marketplace/redhat-operators-z55jt" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.567554 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317-catalog-content\") pod \"redhat-operators-z55jt\" (UID: \"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317\") " pod="openshift-marketplace/redhat-operators-z55jt" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.567989 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317-catalog-content\") pod \"redhat-operators-z55jt\" (UID: \"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317\") " pod="openshift-marketplace/redhat-operators-z55jt" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.569299 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317-utilities\") pod \"redhat-operators-z55jt\" (UID: \"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317\") " 
pod="openshift-marketplace/redhat-operators-z55jt" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.592404 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5m99\" (UniqueName: \"kubernetes.io/projected/0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317-kube-api-access-p5m99\") pod \"redhat-operators-z55jt\" (UID: \"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317\") " pod="openshift-marketplace/redhat-operators-z55jt" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.634823 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.653328 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z55jt" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.737098 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4tpwf"] Nov 21 13:38:46 crc kubenswrapper[4721]: E1121 13:38:46.737823 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bd725df-b6b1-4de7-a77a-917a4f592b92" containerName="collect-profiles" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.737841 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bd725df-b6b1-4de7-a77a-917a4f592b92" containerName="collect-profiles" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.737974 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bd725df-b6b1-4de7-a77a-917a4f592b92" containerName="collect-profiles" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.739018 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4tpwf" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.745815 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4tpwf"] Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.769262 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8bd725df-b6b1-4de7-a77a-917a4f592b92-secret-volume\") pod \"8bd725df-b6b1-4de7-a77a-917a4f592b92\" (UID: \"8bd725df-b6b1-4de7-a77a-917a4f592b92\") " Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.769646 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tn8wz\" (UniqueName: \"kubernetes.io/projected/8bd725df-b6b1-4de7-a77a-917a4f592b92-kube-api-access-tn8wz\") pod \"8bd725df-b6b1-4de7-a77a-917a4f592b92\" (UID: \"8bd725df-b6b1-4de7-a77a-917a4f592b92\") " Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.769817 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8bd725df-b6b1-4de7-a77a-917a4f592b92-config-volume\") pod \"8bd725df-b6b1-4de7-a77a-917a4f592b92\" (UID: \"8bd725df-b6b1-4de7-a77a-917a4f592b92\") " Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.770716 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bd725df-b6b1-4de7-a77a-917a4f592b92-config-volume" (OuterVolumeSpecName: "config-volume") pod "8bd725df-b6b1-4de7-a77a-917a4f592b92" (UID: "8bd725df-b6b1-4de7-a77a-917a4f592b92"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.776526 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bd725df-b6b1-4de7-a77a-917a4f592b92-kube-api-access-tn8wz" (OuterVolumeSpecName: "kube-api-access-tn8wz") pod "8bd725df-b6b1-4de7-a77a-917a4f592b92" (UID: "8bd725df-b6b1-4de7-a77a-917a4f592b92"). InnerVolumeSpecName "kube-api-access-tn8wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.776795 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bd725df-b6b1-4de7-a77a-917a4f592b92-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8bd725df-b6b1-4de7-a77a-917a4f592b92" (UID: "8bd725df-b6b1-4de7-a77a-917a4f592b92"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.872820 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0026c9c-bbcc-4111-bb3d-42667e381763-utilities\") pod \"redhat-operators-4tpwf\" (UID: \"e0026c9c-bbcc-4111-bb3d-42667e381763\") " pod="openshift-marketplace/redhat-operators-4tpwf" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.872921 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2tbx\" (UniqueName: \"kubernetes.io/projected/e0026c9c-bbcc-4111-bb3d-42667e381763-kube-api-access-n2tbx\") pod \"redhat-operators-4tpwf\" (UID: \"e0026c9c-bbcc-4111-bb3d-42667e381763\") " pod="openshift-marketplace/redhat-operators-4tpwf" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.872946 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0026c9c-bbcc-4111-bb3d-42667e381763-catalog-content\") pod \"redhat-operators-4tpwf\" (UID: \"e0026c9c-bbcc-4111-bb3d-42667e381763\") " pod="openshift-marketplace/redhat-operators-4tpwf" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.873011 4721 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8bd725df-b6b1-4de7-a77a-917a4f592b92-config-volume\") on node \"crc\" DevicePath \"\"" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.873027 4721 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8bd725df-b6b1-4de7-a77a-917a4f592b92-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.873039 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tn8wz\" (UniqueName: \"kubernetes.io/projected/8bd725df-b6b1-4de7-a77a-917a4f592b92-kube-api-access-tn8wz\") on node \"crc\" DevicePath \"\"" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.930135 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z55jt"] Nov 21 13:38:46 crc kubenswrapper[4721]: W1121 13:38:46.961111 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0eafd92e_ad3e_4a5f_bdb7_0f2f0fc95317.slice/crio-ef78d9f2f5fc55abecfaeb39a15473952f171bfcdcdc4291e3aa4c412e980f7b WatchSource:0}: Error finding container 
ef78d9f2f5fc55abecfaeb39a15473952f171bfcdcdc4291e3aa4c412e980f7b: Status 404 returned error can't find the container with id ef78d9f2f5fc55abecfaeb39a15473952f171bfcdcdc4291e3aa4c412e980f7b Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.974911 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0026c9c-bbcc-4111-bb3d-42667e381763-utilities\") pod \"redhat-operators-4tpwf\" (UID: \"e0026c9c-bbcc-4111-bb3d-42667e381763\") " pod="openshift-marketplace/redhat-operators-4tpwf" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.974994 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2tbx\" (UniqueName: \"kubernetes.io/projected/e0026c9c-bbcc-4111-bb3d-42667e381763-kube-api-access-n2tbx\") pod \"redhat-operators-4tpwf\" (UID: \"e0026c9c-bbcc-4111-bb3d-42667e381763\") " pod="openshift-marketplace/redhat-operators-4tpwf" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.975020 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0026c9c-bbcc-4111-bb3d-42667e381763-catalog-content\") pod \"redhat-operators-4tpwf\" (UID: \"e0026c9c-bbcc-4111-bb3d-42667e381763\") " pod="openshift-marketplace/redhat-operators-4tpwf" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.975576 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0026c9c-bbcc-4111-bb3d-42667e381763-catalog-content\") pod \"redhat-operators-4tpwf\" (UID: \"e0026c9c-bbcc-4111-bb3d-42667e381763\") " pod="openshift-marketplace/redhat-operators-4tpwf" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.976129 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0026c9c-bbcc-4111-bb3d-42667e381763-utilities\") pod \"redhat-operators-4tpwf\" (UID: \"e0026c9c-bbcc-4111-bb3d-42667e381763\") " pod="openshift-marketplace/redhat-operators-4tpwf" Nov 21 13:38:46 crc kubenswrapper[4721]: I1121 13:38:46.997110 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2tbx\" (UniqueName: \"kubernetes.io/projected/e0026c9c-bbcc-4111-bb3d-42667e381763-kube-api-access-n2tbx\") pod \"redhat-operators-4tpwf\" (UID: \"e0026c9c-bbcc-4111-bb3d-42667e381763\") " pod="openshift-marketplace/redhat-operators-4tpwf" Nov 21 13:38:47 crc kubenswrapper[4721]: I1121 13:38:47.024849 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-pnmsb" Nov 21 13:38:47 crc kubenswrapper[4721]: I1121 13:38:47.079801 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4tpwf" Nov 21 13:38:47 crc kubenswrapper[4721]: I1121 13:38:47.334424 4721 generic.go:334] "Generic (PLEG): container finished" podID="61d80e6e-9ee4-48b1-81d3-a995d6cc9049" containerID="8fa11cdc7b78279be093d0254cba98bd9401a830990510b2687d8793b9459110" exitCode=0 Nov 21 13:38:47 crc kubenswrapper[4721]: I1121 13:38:47.335358 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"61d80e6e-9ee4-48b1-81d3-a995d6cc9049","Type":"ContainerDied","Data":"8fa11cdc7b78279be093d0254cba98bd9401a830990510b2687d8793b9459110"} Nov 21 13:38:47 crc kubenswrapper[4721]: I1121 13:38:47.348762 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4tpwf"] Nov 21 13:38:47 crc kubenswrapper[4721]: I1121 13:38:47.353091 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z55jt" event={"ID":"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317","Type":"ContainerDied","Data":"088d8a88c86bf03b057eceb89eb1433b22103043a98d556db7c537a39242f191"} Nov 21 13:38:47 crc kubenswrapper[4721]: I1121 13:38:47.353900 4721 generic.go:334] "Generic (PLEG): container finished" podID="0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317" containerID="088d8a88c86bf03b057eceb89eb1433b22103043a98d556db7c537a39242f191" exitCode=0 Nov 21 13:38:47 crc kubenswrapper[4721]: I1121 13:38:47.354028 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z55jt" event={"ID":"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317","Type":"ContainerStarted","Data":"ef78d9f2f5fc55abecfaeb39a15473952f171bfcdcdc4291e3aa4c412e980f7b"} Nov 21 13:38:47 crc kubenswrapper[4721]: I1121 13:38:47.358155 4721 generic.go:334] "Generic (PLEG): container finished" podID="607645c8-0edd-40d0-959a-5cd14dd67a56" containerID="b4a84b6142d98a6dae82e16e8917102950cbd82e0e07ed33fc84b274fa611459" exitCode=0 Nov 21 13:38:47 crc kubenswrapper[4721]: I1121 13:38:47.358219 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tfkj9" event={"ID":"607645c8-0edd-40d0-959a-5cd14dd67a56","Type":"ContainerDied","Data":"b4a84b6142d98a6dae82e16e8917102950cbd82e0e07ed33fc84b274fa611459"} Nov 21 13:38:47 crc kubenswrapper[4721]: I1121 13:38:47.363101 4721 generic.go:334] "Generic (PLEG): container finished" podID="f68146a4-d5d0-41b3-b55a-53fba1ed4a31" containerID="d35dbe71e4b7ba39f8737c7c6fff42e235b5c8604a599c92a6e5c28196e1c08f" exitCode=0 Nov 21 13:38:47 crc kubenswrapper[4721]: I1121 13:38:47.363177 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sqtvx" event={"ID":"f68146a4-d5d0-41b3-b55a-53fba1ed4a31","Type":"ContainerDied","Data":"d35dbe71e4b7ba39f8737c7c6fff42e235b5c8604a599c92a6e5c28196e1c08f"} Nov 21 13:38:47 crc kubenswrapper[4721]: I1121 13:38:47.365894 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs" event={"ID":"8bd725df-b6b1-4de7-a77a-917a4f592b92","Type":"ContainerDied","Data":"873007ba5515c42c241cd2ed56ef18e88911032a116dd141bcf2c95609b3540f"} Nov 21 13:38:47 crc kubenswrapper[4721]: I1121 13:38:47.365926 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="873007ba5515c42c241cd2ed56ef18e88911032a116dd141bcf2c95609b3540f" Nov 21 13:38:47 crc kubenswrapper[4721]: I1121 13:38:47.365979 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395530-fd2zs" Nov 21 13:38:48 crc kubenswrapper[4721]: I1121 13:38:48.373457 4721 generic.go:334] "Generic (PLEG): container finished" podID="e0026c9c-bbcc-4111-bb3d-42667e381763" containerID="cd57630b5d00a5a053d83fb86670134ef33785029e4deb7a560b30967ed7995c" exitCode=0 Nov 21 13:38:48 crc kubenswrapper[4721]: I1121 13:38:48.373571 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4tpwf" event={"ID":"e0026c9c-bbcc-4111-bb3d-42667e381763","Type":"ContainerDied","Data":"cd57630b5d00a5a053d83fb86670134ef33785029e4deb7a560b30967ed7995c"} Nov 21 13:38:48 crc kubenswrapper[4721]: I1121 13:38:48.373623 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4tpwf" event={"ID":"e0026c9c-bbcc-4111-bb3d-42667e381763","Type":"ContainerStarted","Data":"be2b5a46228dcb73a4d458ec65db5d9dbb62139b27c057fb38b9629d88035a3b"} Nov 21 13:38:48 crc kubenswrapper[4721]: I1121 13:38:48.752463 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 21 13:38:48 crc kubenswrapper[4721]: I1121 13:38:48.817235 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/61d80e6e-9ee4-48b1-81d3-a995d6cc9049-kube-api-access\") pod \"61d80e6e-9ee4-48b1-81d3-a995d6cc9049\" (UID: \"61d80e6e-9ee4-48b1-81d3-a995d6cc9049\") " Nov 21 13:38:48 crc kubenswrapper[4721]: I1121 13:38:48.817415 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/61d80e6e-9ee4-48b1-81d3-a995d6cc9049-kubelet-dir\") pod \"61d80e6e-9ee4-48b1-81d3-a995d6cc9049\" (UID: \"61d80e6e-9ee4-48b1-81d3-a995d6cc9049\") " Nov 21 13:38:48 crc kubenswrapper[4721]: I1121 13:38:48.817524 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/61d80e6e-9ee4-48b1-81d3-a995d6cc9049-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "61d80e6e-9ee4-48b1-81d3-a995d6cc9049" (UID: "61d80e6e-9ee4-48b1-81d3-a995d6cc9049"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 13:38:48 crc kubenswrapper[4721]: I1121 13:38:48.817990 4721 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/61d80e6e-9ee4-48b1-81d3-a995d6cc9049-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 21 13:38:48 crc kubenswrapper[4721]: I1121 13:38:48.827674 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61d80e6e-9ee4-48b1-81d3-a995d6cc9049-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "61d80e6e-9ee4-48b1-81d3-a995d6cc9049" (UID: "61d80e6e-9ee4-48b1-81d3-a995d6cc9049"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:38:48 crc kubenswrapper[4721]: I1121 13:38:48.919665 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/61d80e6e-9ee4-48b1-81d3-a995d6cc9049-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 21 13:38:49 crc kubenswrapper[4721]: I1121 13:38:49.407105 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"61d80e6e-9ee4-48b1-81d3-a995d6cc9049","Type":"ContainerDied","Data":"b27d68036164b8a255a556ae6ae93a4d190bb9cf27cf675ac2603b6e0bc84853"} Nov 21 13:38:49 crc kubenswrapper[4721]: I1121 13:38:49.407199 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b27d68036164b8a255a556ae6ae93a4d190bb9cf27cf675ac2603b6e0bc84853" Nov 21 13:38:49 crc kubenswrapper[4721]: I1121 13:38:49.407227 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 21 13:38:50 crc kubenswrapper[4721]: I1121 13:38:50.392097 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-gjhwn" Nov 21 13:38:50 crc kubenswrapper[4721]: I1121 13:38:50.395901 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-gjhwn" Nov 21 13:38:51 crc kubenswrapper[4721]: I1121 13:38:51.624548 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-vvz5j container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Nov 21 13:38:51 crc kubenswrapper[4721]: I1121 13:38:51.624613 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-vvz5j" podUID="8b0cfc08-43fe-4026-b4d1-b3aae8c26fde" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Nov 21 13:38:51 crc kubenswrapper[4721]: I1121 13:38:51.624643 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-vvz5j container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Nov 21 13:38:51 crc kubenswrapper[4721]: I1121 13:38:51.624757 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vvz5j" podUID="8b0cfc08-43fe-4026-b4d1-b3aae8c26fde" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Nov 21 13:38:55 crc kubenswrapper[4721]: I1121 13:38:55.863506 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs\") pod \"network-metrics-daemon-sr6gv\" (UID: \"e613396d-3bd5-4a6f-8f9a-507f40954ee6\") " pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:38:55 crc kubenswrapper[4721]: I1121 13:38:55.886869 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e613396d-3bd5-4a6f-8f9a-507f40954ee6-metrics-certs\") pod \"network-metrics-daemon-sr6gv\" (UID: \"e613396d-3bd5-4a6f-8f9a-507f40954ee6\") " 
pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:38:56 crc kubenswrapper[4721]: I1121 13:38:56.152301 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sr6gv" Nov 21 13:39:01 crc kubenswrapper[4721]: I1121 13:39:01.624505 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-vvz5j container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Nov 21 13:39:01 crc kubenswrapper[4721]: I1121 13:39:01.625224 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vvz5j" podUID="8b0cfc08-43fe-4026-b4d1-b3aae8c26fde" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Nov 21 13:39:01 crc kubenswrapper[4721]: I1121 13:39:01.624830 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-vvz5j container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Nov 21 13:39:01 crc kubenswrapper[4721]: I1121 13:39:01.625314 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-vvz5j" podUID="8b0cfc08-43fe-4026-b4d1-b3aae8c26fde" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Nov 21 13:39:01 crc kubenswrapper[4721]: I1121 13:39:01.625364 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-vvz5j" Nov 21 13:39:01 crc kubenswrapper[4721]: I1121 13:39:01.626023 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-vvz5j container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Nov 21 13:39:01 crc kubenswrapper[4721]: I1121 13:39:01.626103 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vvz5j" podUID="8b0cfc08-43fe-4026-b4d1-b3aae8c26fde" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Nov 21 13:39:01 crc kubenswrapper[4721]: I1121 13:39:01.626917 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"08d9afd78ab94c0c2a6219846851aa74b255c04f1cef9a956e1cb020c897a5d1"} pod="openshift-console/downloads-7954f5f757-vvz5j" containerMessage="Container download-server failed liveness probe, will be restarted" Nov 21 13:39:01 crc kubenswrapper[4721]: I1121 13:39:01.627001 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-vvz5j" podUID="8b0cfc08-43fe-4026-b4d1-b3aae8c26fde" containerName="download-server" containerID="cri-o://08d9afd78ab94c0c2a6219846851aa74b255c04f1cef9a956e1cb020c897a5d1" gracePeriod=2 Nov 21 13:39:02 crc kubenswrapper[4721]: I1121 13:39:02.536269 4721 generic.go:334] "Generic (PLEG): container finished" podID="8b0cfc08-43fe-4026-b4d1-b3aae8c26fde" containerID="08d9afd78ab94c0c2a6219846851aa74b255c04f1cef9a956e1cb020c897a5d1" 
exitCode=0 Nov 21 13:39:02 crc kubenswrapper[4721]: I1121 13:39:02.536312 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-vvz5j" event={"ID":"8b0cfc08-43fe-4026-b4d1-b3aae8c26fde","Type":"ContainerDied","Data":"08d9afd78ab94c0c2a6219846851aa74b255c04f1cef9a956e1cb020c897a5d1"} Nov 21 13:39:04 crc kubenswrapper[4721]: I1121 13:39:04.246748 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:39:05 crc kubenswrapper[4721]: I1121 13:39:05.262386 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 13:39:05 crc kubenswrapper[4721]: I1121 13:39:05.263951 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 13:39:11 crc kubenswrapper[4721]: I1121 13:39:11.624918 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-vvz5j container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Nov 21 13:39:11 crc kubenswrapper[4721]: I1121 13:39:11.626298 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vvz5j" podUID="8b0cfc08-43fe-4026-b4d1-b3aae8c26fde" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Nov 21 13:39:12 crc kubenswrapper[4721]: I1121 13:39:12.244982 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-r9lmq" Nov 21 13:39:16 crc kubenswrapper[4721]: E1121 13:39:16.811590 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Nov 21 13:39:16 crc kubenswrapper[4721]: E1121 13:39:16.812415 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7dpmt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-lz5k6_openshift-marketplace(89ba540a-2435-4633-9e07-97311b914cbd): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Nov 21 13:39:16 crc kubenswrapper[4721]: E1121 13:39:16.813521 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-lz5k6" podUID="89ba540a-2435-4633-9e07-97311b914cbd"
Nov 21 13:39:21 crc kubenswrapper[4721]: I1121 13:39:21.624959 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-vvz5j container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Nov 21 13:39:21 crc kubenswrapper[4721]: I1121 13:39:21.625479 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vvz5j" podUID="8b0cfc08-43fe-4026-b4d1-b3aae8c26fde" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Nov 21 13:39:21 crc kubenswrapper[4721]: E1121 13:39:21.887114 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Nov 21 13:39:21 crc kubenswrapper[4721]: E1121 13:39:21.887330 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ctvz6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-hplx9_openshift-marketplace(136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Nov 21 13:39:21 crc kubenswrapper[4721]: E1121 13:39:21.889440 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-hplx9" podUID="136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2"
Nov 21 13:39:21 crc kubenswrapper[4721]: E1121 13:39:21.927935 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Nov 21 13:39:21 crc kubenswrapper[4721]: E1121 13:39:21.928164 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c6bh5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-sghnf_openshift-marketplace(d3fcd3bc-f437-4602-b092-0c9155559334): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Nov 21 13:39:21 crc kubenswrapper[4721]: E1121 13:39:21.929522 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-sghnf" podUID="d3fcd3bc-f437-4602-b092-0c9155559334"
Nov 21 13:39:23 crc kubenswrapper[4721]: E1121 13:39:23.118626 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Nov 21 13:39:23 crc kubenswrapper[4721]: E1121 13:39:23.118802 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kqwgl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-sqtvx_openshift-marketplace(f68146a4-d5d0-41b3-b55a-53fba1ed4a31): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Nov 21 13:39:23 crc kubenswrapper[4721]: E1121 13:39:23.120857 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-sqtvx" podUID="f68146a4-d5d0-41b3-b55a-53fba1ed4a31"
Nov 21 13:39:23 crc kubenswrapper[4721]: I1121 13:39:23.195775 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 13:39:28 crc kubenswrapper[4721]: E1121 13:39:28.582911 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-sqtvx" podUID="f68146a4-d5d0-41b3-b55a-53fba1ed4a31"
Nov 21 13:39:28 crc kubenswrapper[4721]: E1121 13:39:28.583009 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-sghnf" podUID="d3fcd3bc-f437-4602-b092-0c9155559334"
Nov 21 13:39:28 crc kubenswrapper[4721]: E1121 13:39:28.583495 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-hplx9" podUID="136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2"
Nov 21 13:39:30 crc kubenswrapper[4721]: E1121 13:39:30.133905 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Nov 21 13:39:30 crc kubenswrapper[4721]: E1121 13:39:30.134152 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-p5m99,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-z55jt_openshift-marketplace(0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Nov 21 13:39:30 crc kubenswrapper[4721]: E1121 13:39:30.135304 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-z55jt" podUID="0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317"
Nov 21 13:39:31 crc kubenswrapper[4721]: I1121 13:39:31.625608 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-vvz5j container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Nov 21 13:39:31 crc kubenswrapper[4721]: I1121 13:39:31.625693 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vvz5j" podUID="8b0cfc08-43fe-4026-b4d1-b3aae8c26fde" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Nov 21 13:39:35 crc kubenswrapper[4721]: I1121 13:39:35.262732 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 21 13:39:35 crc kubenswrapper[4721]: I1121 13:39:35.263179 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 21 13:39:40 crc kubenswrapper[4721]: I1121 13:39:40.972122 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-sr6gv"]
Nov 21 13:39:40 crc kubenswrapper[4721]: W1121 13:39:40.987218 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode613396d_3bd5_4a6f_8f9a_507f40954ee6.slice/crio-183aaef1f3d71301030a64fc2e8f28b7fe04931cbfe4d14b5cf9b1a3ca8cd81c WatchSource:0}: Error finding container 183aaef1f3d71301030a64fc2e8f28b7fe04931cbfe4d14b5cf9b1a3ca8cd81c: Status 404 returned error can't find the container with id 183aaef1f3d71301030a64fc2e8f28b7fe04931cbfe4d14b5cf9b1a3ca8cd81c
Nov 21 13:39:41 crc kubenswrapper[4721]: I1121 13:39:41.624428 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-vvz5j container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Nov 21 13:39:41 crc kubenswrapper[4721]: I1121 13:39:41.624540 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vvz5j" podUID="8b0cfc08-43fe-4026-b4d1-b3aae8c26fde" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Nov 21 13:39:41 crc kubenswrapper[4721]: I1121 13:39:41.810487 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" event={"ID":"e613396d-3bd5-4a6f-8f9a-507f40954ee6","Type":"ContainerStarted","Data":"183aaef1f3d71301030a64fc2e8f28b7fe04931cbfe4d14b5cf9b1a3ca8cd81c"}
Nov 21 13:39:42 crc kubenswrapper[4721]: E1121 13:39:42.991450 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Nov 21 13:39:42 crc kubenswrapper[4721]: E1121 13:39:42.992191 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n2tbx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-4tpwf_openshift-marketplace(e0026c9c-bbcc-4111-bb3d-42667e381763): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Nov 21 13:39:42 crc kubenswrapper[4721]: E1121 13:39:42.994402 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-4tpwf" podUID="e0026c9c-bbcc-4111-bb3d-42667e381763"
Nov 21 13:39:43 crc kubenswrapper[4721]: E1121 13:39:43.108866 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Nov 21 13:39:43 crc kubenswrapper[4721]: E1121 13:39:43.109583 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zzvcq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-tfkj9_openshift-marketplace(607645c8-0edd-40d0-959a-5cd14dd67a56): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Nov 21 13:39:43 crc kubenswrapper[4721]: E1121 13:39:43.111210 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-tfkj9" podUID="607645c8-0edd-40d0-959a-5cd14dd67a56"
Nov 21 13:39:43 crc kubenswrapper[4721]: I1121 13:39:43.823399 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-vvz5j" event={"ID":"8b0cfc08-43fe-4026-b4d1-b3aae8c26fde","Type":"ContainerStarted","Data":"2535f3488b24117df188ba03499efceda18c3a5bcc648d2f7d0cb83b65877cf3"}
Nov 21 13:39:43 crc kubenswrapper[4721]: I1121 13:39:43.824099 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-vvz5j"
Nov 21 13:39:43 crc kubenswrapper[4721]: I1121 13:39:43.824601 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-vvz5j container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Nov 21 13:39:43 crc kubenswrapper[4721]: I1121 13:39:43.824706 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vvz5j" podUID="8b0cfc08-43fe-4026-b4d1-b3aae8c26fde" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Nov 21 13:39:43 crc kubenswrapper[4721]: I1121 13:39:43.825436 4721 generic.go:334] "Generic (PLEG): container finished" podID="f22ea096-32f4-4972-a8c7-f43072dc8eef" containerID="be139ac75cf57654704aa0d6f905ad1b2644256027564b56e4d36ab06a49effa" exitCode=0
Nov 21 13:39:43 crc kubenswrapper[4721]: I1121 13:39:43.825554 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l99jz" event={"ID":"f22ea096-32f4-4972-a8c7-f43072dc8eef","Type":"ContainerDied","Data":"be139ac75cf57654704aa0d6f905ad1b2644256027564b56e4d36ab06a49effa"}
Nov 21 13:39:43 crc kubenswrapper[4721]: I1121 13:39:43.826808 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" event={"ID":"e613396d-3bd5-4a6f-8f9a-507f40954ee6","Type":"ContainerStarted","Data":"eaaeaa7a013092f272d0c11a646002946565c6898d3e58927a340f4d768669ac"}
Nov 21 13:39:43 crc kubenswrapper[4721]: E1121 13:39:43.827966 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-4tpwf" podUID="e0026c9c-bbcc-4111-bb3d-42667e381763"
Nov 21 13:39:43 crc kubenswrapper[4721]: E1121 13:39:43.828463 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-tfkj9" podUID="607645c8-0edd-40d0-959a-5cd14dd67a56"
Nov 21 13:39:44 crc kubenswrapper[4721]: I1121 13:39:44.833856 4721 patch_prober.go:28] interesting pod/downloads-7954f5f757-vvz5j container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Nov 21 13:39:44 crc kubenswrapper[4721]: I1121 13:39:44.834827 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vvz5j" podUID="8b0cfc08-43fe-4026-b4d1-b3aae8c26fde" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Nov 21 13:39:45 crc kubenswrapper[4721]: I1121 13:39:45.843101 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-sr6gv" event={"ID":"e613396d-3bd5-4a6f-8f9a-507f40954ee6","Type":"ContainerStarted","Data":"4c4b8bb49a4f578df1343f8a04368aead8d7a9122510b745b80a606f187eb5f8"}
Nov 21 13:39:46 crc kubenswrapper[4721]: I1121 13:39:46.867574 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-sr6gv" podStartSLOduration=193.867552049 podStartE2EDuration="3m13.867552049s" podCreationTimestamp="2025-11-21 13:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:39:46.867136607 +0000 UTC m=+224.845027885" watchObservedRunningTime="2025-11-21 13:39:46.867552049 +0000 UTC m=+224.845443327"
Nov 21 13:39:50 crc kubenswrapper[4721]: I1121 13:39:50.912119 4721 generic.go:334] "Generic (PLEG): container finished" podID="f68146a4-d5d0-41b3-b55a-53fba1ed4a31" containerID="5cff64f44ed70e039d9fd1912c5abf0e4e9dc2124aad040224ab6ad98a29a504" exitCode=0
Nov 21 13:39:50 crc kubenswrapper[4721]: I1121 13:39:50.912229 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sqtvx" event={"ID":"f68146a4-d5d0-41b3-b55a-53fba1ed4a31","Type":"ContainerDied","Data":"5cff64f44ed70e039d9fd1912c5abf0e4e9dc2124aad040224ab6ad98a29a504"}
Nov 21 13:39:50 crc kubenswrapper[4721]: I1121 13:39:50.919952 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l99jz" event={"ID":"f22ea096-32f4-4972-a8c7-f43072dc8eef","Type":"ContainerStarted","Data":"4b457bfc24eff2de12b608d0f1537dadf454035a50081fd9806566d63cfe8eff"}
Nov 21 13:39:50 crc kubenswrapper[4721]: I1121 13:39:50.926849 4721 generic.go:334] "Generic (PLEG): container finished" podID="136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2" containerID="e1b12b392ab2f2f5a98e54de33584ccd9fc6d6ba853472869985d652123afba8" exitCode=0
Nov 21 13:39:50 crc kubenswrapper[4721]: I1121 13:39:50.927010 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hplx9" event={"ID":"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2","Type":"ContainerDied","Data":"e1b12b392ab2f2f5a98e54de33584ccd9fc6d6ba853472869985d652123afba8"}
Nov 21 13:39:50 crc kubenswrapper[4721]: I1121 13:39:50.932274 4721 generic.go:334] "Generic (PLEG): container finished" podID="d3fcd3bc-f437-4602-b092-0c9155559334" containerID="6e866f510aa1fa9982904e65e35ca8e08ce38094fe83da2b7d76f7338e7a7513" exitCode=0
Nov 21 13:39:50 crc kubenswrapper[4721]: I1121 13:39:50.932337 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sghnf" event={"ID":"d3fcd3bc-f437-4602-b092-0c9155559334","Type":"ContainerDied","Data":"6e866f510aa1fa9982904e65e35ca8e08ce38094fe83da2b7d76f7338e7a7513"}
Nov 21 13:39:50 crc kubenswrapper[4721]: I1121 13:39:50.941979 4721 generic.go:334] "Generic (PLEG): container finished" podID="89ba540a-2435-4633-9e07-97311b914cbd" containerID="6955c0e94ff3f8bb7e5e30edd5b67b18be442e9bc87c207e766ac3fda6f54d02" exitCode=0
Nov 21 13:39:50 crc kubenswrapper[4721]: I1121 13:39:50.942141 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lz5k6" event={"ID":"89ba540a-2435-4633-9e07-97311b914cbd","Type":"ContainerDied","Data":"6955c0e94ff3f8bb7e5e30edd5b67b18be442e9bc87c207e766ac3fda6f54d02"}
Nov 21 13:39:50 crc kubenswrapper[4721]: I1121 13:39:50.946352 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z55jt" event={"ID":"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317","Type":"ContainerStarted","Data":"376201416b523b3c26a09ed37cb286b05828809b71aab7e1153c1d7ba3a7b5cb"}
Nov 21 13:39:50 crc kubenswrapper[4721]: I1121 13:39:50.978327 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-l99jz" podStartSLOduration=3.208030255 podStartE2EDuration="1m7.978291996s" podCreationTimestamp="2025-11-21 13:38:43 +0000 UTC" firstStartedPulling="2025-11-21 13:38:45.226264044 +0000 UTC m=+163.204155292" lastFinishedPulling="2025-11-21 13:39:49.996525775 +0000 UTC m=+227.974417033" observedRunningTime="2025-11-21 13:39:50.969682029 +0000 UTC m=+228.947573317" watchObservedRunningTime="2025-11-21 13:39:50.978291996 +0000 UTC m=+228.956183284"
Nov 21 13:39:51 crc kubenswrapper[4721]: I1121 13:39:51.643490 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-vvz5j"
Nov 21 13:39:51 crc kubenswrapper[4721]: I1121 13:39:51.954736 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sqtvx" event={"ID":"f68146a4-d5d0-41b3-b55a-53fba1ed4a31","Type":"ContainerStarted","Data":"3082905e259e73e94c167cd4023b93ea80819b3d4d0ec679bcee677c51a5d0b0"}
Nov 21 13:39:51 crc kubenswrapper[4721]: I1121 13:39:51.960362 4721 generic.go:334] "Generic (PLEG): container finished" podID="0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317" containerID="376201416b523b3c26a09ed37cb286b05828809b71aab7e1153c1d7ba3a7b5cb" exitCode=0
Nov 21 13:39:51 crc kubenswrapper[4721]: I1121 13:39:51.960878 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z55jt" event={"ID":"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317","Type":"ContainerDied","Data":"376201416b523b3c26a09ed37cb286b05828809b71aab7e1153c1d7ba3a7b5cb"}
Nov 21 13:39:51 crc kubenswrapper[4721]: I1121 13:39:51.984939 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-sqtvx" podStartSLOduration=3.003022381 podStartE2EDuration="1m6.984913921s" podCreationTimestamp="2025-11-21 13:38:45 +0000 UTC" firstStartedPulling="2025-11-21 13:38:47.365138533 +0000 UTC m=+165.343029791" lastFinishedPulling="2025-11-21 13:39:51.347030053 +0000 UTC m=+229.324921331" observedRunningTime="2025-11-21 13:39:51.979804824 +0000 UTC m=+229.957696112" watchObservedRunningTime="2025-11-21 13:39:51.984913921 +0000 UTC m=+229.962805199"
Nov 21 13:39:52 crc kubenswrapper[4721]: I1121 13:39:52.968681 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sghnf" event={"ID":"d3fcd3bc-f437-4602-b092-0c9155559334","Type":"ContainerStarted","Data":"4b8c0fc2c9bce4ff8ab0b190efbf0a4e587eb94edb0b590f3c7033abcfcc4208"}
Nov 21 13:39:52 crc kubenswrapper[4721]: I1121 13:39:52.971386 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lz5k6" event={"ID":"89ba540a-2435-4633-9e07-97311b914cbd","Type":"ContainerStarted","Data":"bd8ffcfad662b46c2b1c91126df5641236eaf68baa5910fe6647d1e7c972a264"}
Nov 21 13:39:52 crc kubenswrapper[4721]: I1121 13:39:52.973417 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z55jt" event={"ID":"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317","Type":"ContainerStarted","Data":"b7af1e767118e18b1d5d365ff50155588935c8a32b5cbfac4740c1f70435200a"}
Nov 21 13:39:52 crc kubenswrapper[4721]: I1121 13:39:52.975355 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hplx9" event={"ID":"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2","Type":"ContainerStarted","Data":"fa8c4d886cc87e4e3f48137cabc820d523dc5944c4661df85b91115ebcd013dc"}
Nov 21 13:39:53 crc kubenswrapper[4721]: I1121 13:39:53.016934 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-sghnf" podStartSLOduration=3.280070085 podStartE2EDuration="1m10.016919177s" podCreationTimestamp="2025-11-21 13:38:43 +0000 UTC" firstStartedPulling="2025-11-21 13:38:45.16327149 +0000 UTC m=+163.141162748" lastFinishedPulling="2025-11-21 13:39:51.900120582 +0000 UTC m=+229.878011840" observedRunningTime="2025-11-21 13:39:52.992595108 +0000 UTC m=+230.970486366" watchObservedRunningTime="2025-11-21 13:39:53.016919177 +0000 UTC m=+230.994810435"
Nov 21 13:39:53 crc kubenswrapper[4721]: I1121 13:39:53.017936 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lz5k6" podStartSLOduration=4.385254623 podStartE2EDuration="1m10.017928067s" podCreationTimestamp="2025-11-21 13:38:43 +0000 UTC" firstStartedPulling="2025-11-21 13:38:46.302241979 +0000 UTC m=+164.280133237" lastFinishedPulling="2025-11-21 13:39:51.934915423 +0000 UTC m=+229.912806681" observedRunningTime="2025-11-21 13:39:53.013913681 +0000 UTC m=+230.991804939" watchObservedRunningTime="2025-11-21 13:39:53.017928067 +0000 UTC m=+230.995819325"
Nov 21 13:39:53 crc kubenswrapper[4721]: I1121 13:39:53.059197 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hplx9" podStartSLOduration=3.330628151 podStartE2EDuration="1m10.059179143s" podCreationTimestamp="2025-11-21 13:38:43 +0000 UTC" firstStartedPulling="2025-11-21 13:38:45.335163996 +0000 UTC m=+163.313055254" lastFinishedPulling="2025-11-21 13:39:52.063714988 +0000 UTC m=+230.041606246" observedRunningTime="2025-11-21 13:39:53.039105896 +0000 UTC m=+231.016997144" watchObservedRunningTime="2025-11-21 13:39:53.059179143 +0000 UTC m=+231.037070401"
Nov 21 13:39:53 crc kubenswrapper[4721]: I1121 13:39:53.059494 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-z55jt" podStartSLOduration=2.406565845 podStartE2EDuration="1m7.059487502s" podCreationTimestamp="2025-11-21 13:38:46 +0000 UTC" firstStartedPulling="2025-11-21 13:38:47.354865962 +0000 UTC m=+165.332757220" lastFinishedPulling="2025-11-21 13:39:52.007787619 +0000 UTC m=+229.985678877" observedRunningTime="2025-11-21 13:39:53.058688299 +0000 UTC m=+231.036579557" watchObservedRunningTime="2025-11-21 13:39:53.059487502 +0000 UTC m=+231.037378760"
Nov 21 13:39:53 crc kubenswrapper[4721]: I1121 13:39:53.498814 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-l99jz"
Nov 21 13:39:53 crc kubenswrapper[4721]: I1121 13:39:53.498892 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-l99jz"
Nov 21 13:39:53 crc kubenswrapper[4721]: I1121 13:39:53.749797 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hplx9"
Nov 21 13:39:53 crc kubenswrapper[4721]: I1121 13:39:53.749859 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hplx9"
Nov 21 13:39:53 crc kubenswrapper[4721]: I1121 13:39:53.821618 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-l99jz"
Nov 21 13:39:53 crc kubenswrapper[4721]: I1121 13:39:53.906975 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-sghnf"
Nov 21 13:39:53 crc kubenswrapper[4721]: I1121 13:39:53.907224 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-sghnf"
Nov 21 13:39:53 crc kubenswrapper[4721]: I1121 13:39:53.949689 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-sghnf"
Nov 21 13:39:54 crc kubenswrapper[4721]: I1121 13:39:54.068883 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lz5k6"
Nov 21 13:39:54 crc kubenswrapper[4721]: I1121 13:39:54.068945 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lz5k6"
Nov 21 13:39:54 crc kubenswrapper[4721]: I1121 13:39:54.800344 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-hplx9" podUID="136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2" containerName="registry-server" probeResult="failure" output=<
Nov 21 13:39:54 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s
Nov 21 13:39:54 crc kubenswrapper[4721]: >
Nov 21 13:39:55 crc kubenswrapper[4721]: I1121 13:39:55.125377 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-lz5k6" podUID="89ba540a-2435-4633-9e07-97311b914cbd" containerName="registry-server" probeResult="failure" output=<
Nov 21 13:39:55 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s
Nov 21 13:39:55 crc kubenswrapper[4721]: >
Nov 21 13:39:55 crc kubenswrapper[4721]: I1121 13:39:55.845473 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-sqtvx"
Nov 21 13:39:55 crc kubenswrapper[4721]: I1121 13:39:55.849274 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-sqtvx"
Nov 21 13:39:55 crc kubenswrapper[4721]: I1121 13:39:55.913211 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-sqtvx"
Nov 21 13:39:56 crc kubenswrapper[4721]: I1121 13:39:56.655401 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-z55jt"
Nov 21 13:39:56 crc kubenswrapper[4721]: I1121 13:39:56.655458 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-z55jt"
Nov 21 13:39:57 crc kubenswrapper[4721]: I1121 13:39:57.729444 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-z55jt" podUID="0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317" containerName="registry-server" probeResult="failure" output=<
Nov 21 13:39:57 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s
Nov 21 13:39:57 crc kubenswrapper[4721]: >
Nov 21 13:40:03 crc kubenswrapper[4721]: I1121 13:40:03.580565 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-l99jz"
Nov 21 13:40:03 crc kubenswrapper[4721]: I1121 13:40:03.820011 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hplx9"
Nov 21 13:40:03 crc kubenswrapper[4721]: I1121 13:40:03.886831 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hplx9"
Nov 21 13:40:03 crc kubenswrapper[4721]: I1121 13:40:03.975688 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-sghnf"
Nov 21 13:40:04 crc kubenswrapper[4721]: I1121 13:40:04.143678 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lz5k6"
Nov 21 13:40:04 crc kubenswrapper[4721]: I1121 13:40:04.209315 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lz5k6"
Nov 21 13:40:05 crc kubenswrapper[4721]: I1121 13:40:05.262260 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 21 13:40:05 crc kubenswrapper[4721]: I1121 13:40:05.262374 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 21 13:40:05 crc kubenswrapper[4721]: I1121 13:40:05.262470 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mstbk"
Nov 21 13:40:05 crc kubenswrapper[4721]: I1121 13:40:05.263796 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6"} pod="openshift-machine-config-operator/machine-config-daemon-mstbk" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 21 13:40:05 crc kubenswrapper[4721]: I1121 13:40:05.263940 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" containerID="cri-o://ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6" gracePeriod=600
Nov 21 13:40:05 crc kubenswrapper[4721]: I1121 13:40:05.636688 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lz5k6"]
Nov 21 13:40:05 crc kubenswrapper[4721]: I1121 13:40:05.920179 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-sqtvx"
Nov 21 13:40:06 crc kubenswrapper[4721]: I1121 13:40:06.063737 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lz5k6" podUID="89ba540a-2435-4633-9e07-97311b914cbd" containerName="registry-server" containerID="cri-o://bd8ffcfad662b46c2b1c91126df5641236eaf68baa5910fe6647d1e7c972a264" gracePeriod=2
Nov 21 13:40:06 crc kubenswrapper[4721]: I1121 13:40:06.235985 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sghnf"]
Nov 21 13:40:06 crc kubenswrapper[4721]: I1121 13:40:06.236533 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-sghnf" podUID="d3fcd3bc-f437-4602-b092-0c9155559334" containerName="registry-server" containerID="cri-o://4b8c0fc2c9bce4ff8ab0b190efbf0a4e587eb94edb0b590f3c7033abcfcc4208" gracePeriod=2
Nov 21 13:40:06 crc kubenswrapper[4721]: I1121 13:40:06.726255 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-z55jt"
Nov 21 13:40:06 crc kubenswrapper[4721]: I1121 13:40:06.802537 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-z55jt"
Nov 21 13:40:08 crc kubenswrapper[4721]: I1121 13:40:08.634956 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sqtvx"]
Nov 21 13:40:08 crc kubenswrapper[4721]: I1121 13:40:08.638621 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-sqtvx" podUID="f68146a4-d5d0-41b3-b55a-53fba1ed4a31" containerName="registry-server" containerID="cri-o://3082905e259e73e94c167cd4023b93ea80819b3d4d0ec679bcee677c51a5d0b0" gracePeriod=2
Nov 21 13:40:10 crc kubenswrapper[4721]: I1121 13:40:10.111159 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tfkj9" event={"ID":"607645c8-0edd-40d0-959a-5cd14dd67a56","Type":"ContainerStarted","Data":"7d650514d49a9863350acc58685135d573d12d86e131c35d4858dc009d0b00e1"}
Nov 21 13:40:10 crc kubenswrapper[4721]: I1121 13:40:10.114258 4721 generic.go:334] "Generic (PLEG): container finished" podID="fe3943c5-1349-4275-bc45-25db418c7488" containerID="ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6" exitCode=0
Nov 21 13:40:10 crc kubenswrapper[4721]: I1121 13:40:10.114340 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" event={"ID":"fe3943c5-1349-4275-bc45-25db418c7488","Type":"ContainerDied","Data":"ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6"}
Nov 21 13:40:10 crc kubenswrapper[4721]: I1121 13:40:10.117155 4721 generic.go:334] "Generic (PLEG): container finished" podID="d3fcd3bc-f437-4602-b092-0c9155559334" containerID="4b8c0fc2c9bce4ff8ab0b190efbf0a4e587eb94edb0b590f3c7033abcfcc4208" exitCode=0
Nov 21 13:40:10 crc kubenswrapper[4721]: I1121 13:40:10.117231 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sghnf" event={"ID":"d3fcd3bc-f437-4602-b092-0c9155559334","Type":"ContainerDied","Data":"4b8c0fc2c9bce4ff8ab0b190efbf0a4e587eb94edb0b590f3c7033abcfcc4208"}
Nov 21 13:40:10 crc kubenswrapper[4721]: I1121 13:40:10.121750 4721 generic.go:334] "Generic (PLEG): container finished" podID="89ba540a-2435-4633-9e07-97311b914cbd" containerID="bd8ffcfad662b46c2b1c91126df5641236eaf68baa5910fe6647d1e7c972a264" exitCode=0
Nov 21 13:40:10 crc kubenswrapper[4721]: I1121 13:40:10.121835 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lz5k6" event={"ID":"89ba540a-2435-4633-9e07-97311b914cbd","Type":"ContainerDied","Data":"bd8ffcfad662b46c2b1c91126df5641236eaf68baa5910fe6647d1e7c972a264"}
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.065957 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lz5k6"
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.070192 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sghnf"
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.132923 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4tpwf" event={"ID":"e0026c9c-bbcc-4111-bb3d-42667e381763","Type":"ContainerStarted","Data":"d658b08693aa12c329fe10f17b4fb6ff90e687f04db4ba8136dacc07cb845e7c"}
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.135769 4721 generic.go:334] "Generic (PLEG): container finished" podID="607645c8-0edd-40d0-959a-5cd14dd67a56" containerID="7d650514d49a9863350acc58685135d573d12d86e131c35d4858dc009d0b00e1" exitCode=0
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.135839 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tfkj9" event={"ID":"607645c8-0edd-40d0-959a-5cd14dd67a56","Type":"ContainerDied","Data":"7d650514d49a9863350acc58685135d573d12d86e131c35d4858dc009d0b00e1"}
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.144073 4721 generic.go:334] "Generic (PLEG): container finished" podID="f68146a4-d5d0-41b3-b55a-53fba1ed4a31" containerID="3082905e259e73e94c167cd4023b93ea80819b3d4d0ec679bcee677c51a5d0b0" exitCode=0
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.144094 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sqtvx" event={"ID":"f68146a4-d5d0-41b3-b55a-53fba1ed4a31","Type":"ContainerDied","Data":"3082905e259e73e94c167cd4023b93ea80819b3d4d0ec679bcee677c51a5d0b0"}
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.148134 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sghnf" event={"ID":"d3fcd3bc-f437-4602-b092-0c9155559334","Type":"ContainerDied","Data":"650df45a8cd14fbed6e61e83a3c730ecd755dff8cf17f4957ba628f860a1b3fe"}
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.148168 4721 scope.go:117] "RemoveContainer" containerID="4b8c0fc2c9bce4ff8ab0b190efbf0a4e587eb94edb0b590f3c7033abcfcc4208"
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.148201 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sghnf"
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.150829 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lz5k6" event={"ID":"89ba540a-2435-4633-9e07-97311b914cbd","Type":"ContainerDied","Data":"aa15f010c703275e662e6c1a2070687745d87b048f70b5309004f80866fdad08"}
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.150920 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lz5k6"
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.198366 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3fcd3bc-f437-4602-b092-0c9155559334-utilities\") pod \"d3fcd3bc-f437-4602-b092-0c9155559334\" (UID: \"d3fcd3bc-f437-4602-b092-0c9155559334\") "
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.198500 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7dpmt\" (UniqueName: \"kubernetes.io/projected/89ba540a-2435-4633-9e07-97311b914cbd-kube-api-access-7dpmt\") pod \"89ba540a-2435-4633-9e07-97311b914cbd\" (UID: \"89ba540a-2435-4633-9e07-97311b914cbd\") "
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.198541 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3fcd3bc-f437-4602-b092-0c9155559334-catalog-content\") pod \"d3fcd3bc-f437-4602-b092-0c9155559334\" (UID: \"d3fcd3bc-f437-4602-b092-0c9155559334\") "
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.198581 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6bh5\" (UniqueName: \"kubernetes.io/projected/d3fcd3bc-f437-4602-b092-0c9155559334-kube-api-access-c6bh5\") pod \"d3fcd3bc-f437-4602-b092-0c9155559334\" (UID: \"d3fcd3bc-f437-4602-b092-0c9155559334\") "
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.198646 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89ba540a-2435-4633-9e07-97311b914cbd-catalog-content\") pod \"89ba540a-2435-4633-9e07-97311b914cbd\" (UID: \"89ba540a-2435-4633-9e07-97311b914cbd\") "
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.198679 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89ba540a-2435-4633-9e07-97311b914cbd-utilities\") pod \"89ba540a-2435-4633-9e07-97311b914cbd\" (UID: \"89ba540a-2435-4633-9e07-97311b914cbd\") "
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.200327 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3fcd3bc-f437-4602-b092-0c9155559334-utilities" (OuterVolumeSpecName: "utilities") pod "d3fcd3bc-f437-4602-b092-0c9155559334" (UID: "d3fcd3bc-f437-4602-b092-0c9155559334"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.200420 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89ba540a-2435-4633-9e07-97311b914cbd-utilities" (OuterVolumeSpecName: "utilities") pod "89ba540a-2435-4633-9e07-97311b914cbd" (UID: "89ba540a-2435-4633-9e07-97311b914cbd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.220767 4721 scope.go:117] "RemoveContainer" containerID="6e866f510aa1fa9982904e65e35ca8e08ce38094fe83da2b7d76f7338e7a7513"
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.220789 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89ba540a-2435-4633-9e07-97311b914cbd-kube-api-access-7dpmt" (OuterVolumeSpecName: "kube-api-access-7dpmt") pod "89ba540a-2435-4633-9e07-97311b914cbd" (UID: "89ba540a-2435-4633-9e07-97311b914cbd"). InnerVolumeSpecName "kube-api-access-7dpmt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.230197 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3fcd3bc-f437-4602-b092-0c9155559334-kube-api-access-c6bh5" (OuterVolumeSpecName: "kube-api-access-c6bh5") pod "d3fcd3bc-f437-4602-b092-0c9155559334" (UID: "d3fcd3bc-f437-4602-b092-0c9155559334"). InnerVolumeSpecName "kube-api-access-c6bh5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.300409 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6bh5\" (UniqueName: \"kubernetes.io/projected/d3fcd3bc-f437-4602-b092-0c9155559334-kube-api-access-c6bh5\") on node \"crc\" DevicePath \"\""
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.300573 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89ba540a-2435-4633-9e07-97311b914cbd-utilities\") on node \"crc\" DevicePath \"\""
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.300863 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3fcd3bc-f437-4602-b092-0c9155559334-utilities\") on node \"crc\" DevicePath \"\""
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.300881 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7dpmt\" (UniqueName: \"kubernetes.io/projected/89ba540a-2435-4633-9e07-97311b914cbd-kube-api-access-7dpmt\") on node \"crc\" DevicePath \"\""
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.320571 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89ba540a-2435-4633-9e07-97311b914cbd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "89ba540a-2435-4633-9e07-97311b914cbd" (UID: "89ba540a-2435-4633-9e07-97311b914cbd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.331622 4721 scope.go:117] "RemoveContainer" containerID="1779aace10bbd85a24fefb9ddfb6251ff8a69a62a2c33a8d7440393a7417e1ce"
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.339159 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3fcd3bc-f437-4602-b092-0c9155559334-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d3fcd3bc-f437-4602-b092-0c9155559334" (UID: "d3fcd3bc-f437-4602-b092-0c9155559334"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.344899 4721 scope.go:117] "RemoveContainer" containerID="bd8ffcfad662b46c2b1c91126df5641236eaf68baa5910fe6647d1e7c972a264"
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.357148 4721 scope.go:117] "RemoveContainer" containerID="6955c0e94ff3f8bb7e5e30edd5b67b18be442e9bc87c207e766ac3fda6f54d02"
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.370573 4721 scope.go:117] "RemoveContainer" containerID="5de34f2dd76b5a90a53688b17313e7b18e9472ac3e9ee7fd92d6c55e492c8eb8"
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.402716 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3fcd3bc-f437-4602-b092-0c9155559334-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.402761 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89ba540a-2435-4633-9e07-97311b914cbd-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.487055 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lz5k6"]
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.490525 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lz5k6"]
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.503304 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sghnf"]
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.525891 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-sghnf"]
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.786615 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sqtvx"
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.923982 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqwgl\" (UniqueName: \"kubernetes.io/projected/f68146a4-d5d0-41b3-b55a-53fba1ed4a31-kube-api-access-kqwgl\") pod \"f68146a4-d5d0-41b3-b55a-53fba1ed4a31\" (UID: \"f68146a4-d5d0-41b3-b55a-53fba1ed4a31\") "
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.924177 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f68146a4-d5d0-41b3-b55a-53fba1ed4a31-utilities\") pod \"f68146a4-d5d0-41b3-b55a-53fba1ed4a31\" (UID: \"f68146a4-d5d0-41b3-b55a-53fba1ed4a31\") "
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.924255 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f68146a4-d5d0-41b3-b55a-53fba1ed4a31-catalog-content\") pod \"f68146a4-d5d0-41b3-b55a-53fba1ed4a31\" (UID: \"f68146a4-d5d0-41b3-b55a-53fba1ed4a31\") "
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.924978 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f68146a4-d5d0-41b3-b55a-53fba1ed4a31-utilities" (OuterVolumeSpecName: "utilities") pod "f68146a4-d5d0-41b3-b55a-53fba1ed4a31" (UID: "f68146a4-d5d0-41b3-b55a-53fba1ed4a31"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.928940 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f68146a4-d5d0-41b3-b55a-53fba1ed4a31-kube-api-access-kqwgl" (OuterVolumeSpecName: "kube-api-access-kqwgl") pod "f68146a4-d5d0-41b3-b55a-53fba1ed4a31" (UID: "f68146a4-d5d0-41b3-b55a-53fba1ed4a31"). InnerVolumeSpecName "kube-api-access-kqwgl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 13:40:11 crc kubenswrapper[4721]: I1121 13:40:11.940884 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f68146a4-d5d0-41b3-b55a-53fba1ed4a31-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f68146a4-d5d0-41b3-b55a-53fba1ed4a31" (UID: "f68146a4-d5d0-41b3-b55a-53fba1ed4a31"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 13:40:12 crc kubenswrapper[4721]: I1121 13:40:12.025272 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqwgl\" (UniqueName: \"kubernetes.io/projected/f68146a4-d5d0-41b3-b55a-53fba1ed4a31-kube-api-access-kqwgl\") on node \"crc\" DevicePath \"\""
Nov 21 13:40:12 crc kubenswrapper[4721]: I1121 13:40:12.025310 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f68146a4-d5d0-41b3-b55a-53fba1ed4a31-utilities\") on node \"crc\" DevicePath \"\""
Nov 21 13:40:12 crc kubenswrapper[4721]: I1121 13:40:12.025324 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f68146a4-d5d0-41b3-b55a-53fba1ed4a31-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 21 13:40:12 crc kubenswrapper[4721]: I1121 13:40:12.170759 4721 generic.go:334] "Generic (PLEG): container finished" podID="e0026c9c-bbcc-4111-bb3d-42667e381763" containerID="d658b08693aa12c329fe10f17b4fb6ff90e687f04db4ba8136dacc07cb845e7c" exitCode=0
Nov 21 13:40:12 crc kubenswrapper[4721]: I1121 13:40:12.170827 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4tpwf" event={"ID":"e0026c9c-bbcc-4111-bb3d-42667e381763","Type":"ContainerDied","Data":"d658b08693aa12c329fe10f17b4fb6ff90e687f04db4ba8136dacc07cb845e7c"}
Nov 21 13:40:12 crc kubenswrapper[4721]: I1121 13:40:12.175828 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sqtvx" event={"ID":"f68146a4-d5d0-41b3-b55a-53fba1ed4a31","Type":"ContainerDied","Data":"1dcc716773b87c018866fdb763e66e89939ffe9b116a5b8405da4a40e472fce0"}
Nov 21 13:40:12 crc kubenswrapper[4721]: I1121 13:40:12.175892 4721 scope.go:117] "RemoveContainer" containerID="3082905e259e73e94c167cd4023b93ea80819b3d4d0ec679bcee677c51a5d0b0"
Nov 21 13:40:12 crc kubenswrapper[4721]: I1121 13:40:12.176012 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sqtvx"
Nov 21 13:40:12 crc kubenswrapper[4721]: I1121 13:40:12.183772 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" event={"ID":"fe3943c5-1349-4275-bc45-25db418c7488","Type":"ContainerStarted","Data":"cc646a5815972aaf7a75fc681835a3564be98e7ddc4ac19072eaae8996d79323"}
Nov 21 13:40:12 crc kubenswrapper[4721]: I1121 13:40:12.211639 4721 scope.go:117] "RemoveContainer" containerID="5cff64f44ed70e039d9fd1912c5abf0e4e9dc2124aad040224ab6ad98a29a504"
Nov 21 13:40:12 crc kubenswrapper[4721]: I1121 13:40:12.230466 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sqtvx"]
Nov 21 13:40:12 crc kubenswrapper[4721]: I1121 13:40:12.233753 4721 scope.go:117] "RemoveContainer" containerID="d35dbe71e4b7ba39f8737c7c6fff42e235b5c8604a599c92a6e5c28196e1c08f"
Nov 21 13:40:12 crc kubenswrapper[4721]: I1121 13:40:12.238617 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-sqtvx"]
Nov 21 13:40:12 crc kubenswrapper[4721]: I1121 13:40:12.989525 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89ba540a-2435-4633-9e07-97311b914cbd" path="/var/lib/kubelet/pods/89ba540a-2435-4633-9e07-97311b914cbd/volumes"
Nov 21 13:40:12 crc kubenswrapper[4721]: I1121 13:40:12.991078 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3fcd3bc-f437-4602-b092-0c9155559334" path="/var/lib/kubelet/pods/d3fcd3bc-f437-4602-b092-0c9155559334/volumes"
Nov 21 13:40:12 crc kubenswrapper[4721]: I1121 13:40:12.991755 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f68146a4-d5d0-41b3-b55a-53fba1ed4a31" path="/var/lib/kubelet/pods/f68146a4-d5d0-41b3-b55a-53fba1ed4a31/volumes"
Nov 21 13:40:13 crc kubenswrapper[4721]: I1121 13:40:13.195293 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tfkj9" event={"ID":"607645c8-0edd-40d0-959a-5cd14dd67a56","Type":"ContainerStarted","Data":"cdbb9fbed3aa41598ac56fee5e67220a348f7472681e824e6285d6135e517312"}
Nov 21 13:40:13 crc kubenswrapper[4721]: I1121 13:40:13.200642 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4tpwf" event={"ID":"e0026c9c-bbcc-4111-bb3d-42667e381763","Type":"ContainerStarted","Data":"6758815b06d11210d27fe903dbbaaf0f13f14d6c2ff513a1e7f4fb6ab3a876bd"}
Nov 21 13:40:13 crc kubenswrapper[4721]: I1121 13:40:13.232286 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tfkj9" podStartSLOduration=2.7597228129999998 podStartE2EDuration="1m28.232269953s" podCreationTimestamp="2025-11-21 13:38:45 +0000 UTC" firstStartedPulling="2025-11-21 13:38:47.365191685 +0000 UTC m=+165.343082943" lastFinishedPulling="2025-11-21 13:40:12.837738805 +0000 UTC m=+250.815630083" observedRunningTime="2025-11-21 13:40:13.231794189 +0000 UTC m=+251.209685457" watchObservedRunningTime="2025-11-21 13:40:13.232269953 +0000 UTC m=+251.210161211"
Nov 21 13:40:13 crc kubenswrapper[4721]: I1121 13:40:13.264550 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4tpwf" podStartSLOduration=2.615575145 podStartE2EDuration="1m27.264533171s" podCreationTimestamp="2025-11-21 13:38:46 +0000 UTC" firstStartedPulling="2025-11-21 13:38:48.378489916 +0000 UTC m=+166.356381174" lastFinishedPulling="2025-11-21 13:40:13.027447942 +0000 UTC m=+251.005339200" observedRunningTime="2025-11-21 13:40:13.261335949 +0000 UTC m=+251.239227217" watchObservedRunningTime="2025-11-21 13:40:13.264533171 +0000 UTC m=+251.242424429"
Nov 21 13:40:15 crc kubenswrapper[4721]: I1121 13:40:15.451326 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tfkj9"
Nov 21 13:40:15 crc kubenswrapper[4721]: I1121 13:40:15.451822 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tfkj9"
Nov 21 13:40:15 crc kubenswrapper[4721]: I1121 13:40:15.493971 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tfkj9"
Nov 21 13:40:17 crc kubenswrapper[4721]: I1121 13:40:17.080904 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4tpwf"
Nov 21 13:40:17 crc kubenswrapper[4721]: I1121 13:40:17.081284 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4tpwf"
Nov 21 13:40:18 crc kubenswrapper[4721]: I1121 13:40:18.121242 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4tpwf" podUID="e0026c9c-bbcc-4111-bb3d-42667e381763" containerName="registry-server" probeResult="failure" output=<
Nov 21 13:40:18 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s
Nov 21 13:40:18 crc kubenswrapper[4721]: >
Nov 21 13:40:21 crc kubenswrapper[4721]: I1121 13:40:21.467313 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-fnzlv"]
Nov 21 13:40:25 crc kubenswrapper[4721]: I1121 13:40:25.490597 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tfkj9"
Nov 21 13:40:27 crc kubenswrapper[4721]: I1121 13:40:27.115510 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4tpwf"
Nov 21 13:40:27 crc kubenswrapper[4721]: I1121 13:40:27.157185 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4tpwf"
Nov 21 13:40:27 crc kubenswrapper[4721]: I1121 13:40:27.343577 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4tpwf"]
Nov 21 13:40:28 crc kubenswrapper[4721]: I1121 13:40:28.302182 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4tpwf" podUID="e0026c9c-bbcc-4111-bb3d-42667e381763" containerName="registry-server" containerID="cri-o://6758815b06d11210d27fe903dbbaaf0f13f14d6c2ff513a1e7f4fb6ab3a876bd" gracePeriod=2
Nov 21 13:40:28 crc kubenswrapper[4721]: I1121 13:40:28.640703 4721 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/redhat-operators-4tpwf" Nov 21 13:40:28 crc kubenswrapper[4721]: I1121 13:40:28.838713 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0026c9c-bbcc-4111-bb3d-42667e381763-catalog-content\") pod \"e0026c9c-bbcc-4111-bb3d-42667e381763\" (UID: \"e0026c9c-bbcc-4111-bb3d-42667e381763\") " Nov 21 13:40:28 crc kubenswrapper[4721]: I1121 13:40:28.838762 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2tbx\" (UniqueName: \"kubernetes.io/projected/e0026c9c-bbcc-4111-bb3d-42667e381763-kube-api-access-n2tbx\") pod \"e0026c9c-bbcc-4111-bb3d-42667e381763\" (UID: \"e0026c9c-bbcc-4111-bb3d-42667e381763\") " Nov 21 13:40:28 crc kubenswrapper[4721]: I1121 13:40:28.838804 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0026c9c-bbcc-4111-bb3d-42667e381763-utilities\") pod \"e0026c9c-bbcc-4111-bb3d-42667e381763\" (UID: \"e0026c9c-bbcc-4111-bb3d-42667e381763\") " Nov 21 13:40:28 crc kubenswrapper[4721]: I1121 13:40:28.839820 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0026c9c-bbcc-4111-bb3d-42667e381763-utilities" (OuterVolumeSpecName: "utilities") pod "e0026c9c-bbcc-4111-bb3d-42667e381763" (UID: "e0026c9c-bbcc-4111-bb3d-42667e381763"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:40:28 crc kubenswrapper[4721]: I1121 13:40:28.843978 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0026c9c-bbcc-4111-bb3d-42667e381763-kube-api-access-n2tbx" (OuterVolumeSpecName: "kube-api-access-n2tbx") pod "e0026c9c-bbcc-4111-bb3d-42667e381763" (UID: "e0026c9c-bbcc-4111-bb3d-42667e381763"). InnerVolumeSpecName "kube-api-access-n2tbx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:40:28 crc kubenswrapper[4721]: I1121 13:40:28.926719 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0026c9c-bbcc-4111-bb3d-42667e381763-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e0026c9c-bbcc-4111-bb3d-42667e381763" (UID: "e0026c9c-bbcc-4111-bb3d-42667e381763"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:40:28 crc kubenswrapper[4721]: I1121 13:40:28.940264 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0026c9c-bbcc-4111-bb3d-42667e381763-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 13:40:28 crc kubenswrapper[4721]: I1121 13:40:28.940296 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2tbx\" (UniqueName: \"kubernetes.io/projected/e0026c9c-bbcc-4111-bb3d-42667e381763-kube-api-access-n2tbx\") on node \"crc\" DevicePath \"\"" Nov 21 13:40:28 crc kubenswrapper[4721]: I1121 13:40:28.940311 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0026c9c-bbcc-4111-bb3d-42667e381763-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 13:40:29 crc kubenswrapper[4721]: I1121 13:40:29.309013 4721 generic.go:334] "Generic (PLEG): container finished" podID="e0026c9c-bbcc-4111-bb3d-42667e381763" containerID="6758815b06d11210d27fe903dbbaaf0f13f14d6c2ff513a1e7f4fb6ab3a876bd" exitCode=0 Nov 21 13:40:29 crc kubenswrapper[4721]: I1121 13:40:29.309052 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4tpwf" event={"ID":"e0026c9c-bbcc-4111-bb3d-42667e381763","Type":"ContainerDied","Data":"6758815b06d11210d27fe903dbbaaf0f13f14d6c2ff513a1e7f4fb6ab3a876bd"} Nov 21 13:40:29 crc kubenswrapper[4721]: I1121 13:40:29.309102 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4tpwf" event={"ID":"e0026c9c-bbcc-4111-bb3d-42667e381763","Type":"ContainerDied","Data":"be2b5a46228dcb73a4d458ec65db5d9dbb62139b27c057fb38b9629d88035a3b"} Nov 21 13:40:29 crc kubenswrapper[4721]: I1121 13:40:29.309120 4721 scope.go:117] "RemoveContainer" containerID="6758815b06d11210d27fe903dbbaaf0f13f14d6c2ff513a1e7f4fb6ab3a876bd" Nov 21 13:40:29 crc kubenswrapper[4721]: I1121 13:40:29.309119 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4tpwf" Nov 21 13:40:29 crc kubenswrapper[4721]: I1121 13:40:29.329640 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4tpwf"] Nov 21 13:40:29 crc kubenswrapper[4721]: I1121 13:40:29.331764 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4tpwf"] Nov 21 13:40:29 crc kubenswrapper[4721]: I1121 13:40:29.335311 4721 scope.go:117] "RemoveContainer" containerID="d658b08693aa12c329fe10f17b4fb6ff90e687f04db4ba8136dacc07cb845e7c" Nov 21 13:40:29 crc kubenswrapper[4721]: I1121 13:40:29.356279 4721 scope.go:117] "RemoveContainer" containerID="cd57630b5d00a5a053d83fb86670134ef33785029e4deb7a560b30967ed7995c" Nov 21 13:40:29 crc kubenswrapper[4721]: I1121 13:40:29.390452 4721 scope.go:117] "RemoveContainer" containerID="6758815b06d11210d27fe903dbbaaf0f13f14d6c2ff513a1e7f4fb6ab3a876bd" Nov 21 13:40:29 crc kubenswrapper[4721]: E1121 13:40:29.390989 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6758815b06d11210d27fe903dbbaaf0f13f14d6c2ff513a1e7f4fb6ab3a876bd\": container with ID starting with 6758815b06d11210d27fe903dbbaaf0f13f14d6c2ff513a1e7f4fb6ab3a876bd not found: ID does not exist" containerID="6758815b06d11210d27fe903dbbaaf0f13f14d6c2ff513a1e7f4fb6ab3a876bd" Nov 21 13:40:29 crc kubenswrapper[4721]: I1121 13:40:29.391022 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6758815b06d11210d27fe903dbbaaf0f13f14d6c2ff513a1e7f4fb6ab3a876bd"} err="failed to get container status \"6758815b06d11210d27fe903dbbaaf0f13f14d6c2ff513a1e7f4fb6ab3a876bd\": rpc error: code = NotFound desc = could not find container \"6758815b06d11210d27fe903dbbaaf0f13f14d6c2ff513a1e7f4fb6ab3a876bd\": container with ID starting with 6758815b06d11210d27fe903dbbaaf0f13f14d6c2ff513a1e7f4fb6ab3a876bd not found: ID does not exist" Nov 21 13:40:29 crc kubenswrapper[4721]: I1121 13:40:29.391041 4721 scope.go:117] "RemoveContainer" containerID="d658b08693aa12c329fe10f17b4fb6ff90e687f04db4ba8136dacc07cb845e7c" Nov 21 13:40:29 crc kubenswrapper[4721]: E1121 13:40:29.391367 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d658b08693aa12c329fe10f17b4fb6ff90e687f04db4ba8136dacc07cb845e7c\": container with ID starting with d658b08693aa12c329fe10f17b4fb6ff90e687f04db4ba8136dacc07cb845e7c not found: ID does not exist" containerID="d658b08693aa12c329fe10f17b4fb6ff90e687f04db4ba8136dacc07cb845e7c" Nov 21 13:40:29 crc kubenswrapper[4721]: I1121 13:40:29.391387 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d658b08693aa12c329fe10f17b4fb6ff90e687f04db4ba8136dacc07cb845e7c"} err="failed to get container status \"d658b08693aa12c329fe10f17b4fb6ff90e687f04db4ba8136dacc07cb845e7c\": rpc error: code = NotFound desc = could not find container \"d658b08693aa12c329fe10f17b4fb6ff90e687f04db4ba8136dacc07cb845e7c\": container with ID starting with d658b08693aa12c329fe10f17b4fb6ff90e687f04db4ba8136dacc07cb845e7c not found: ID does not exist" Nov 21 13:40:29 crc kubenswrapper[4721]: I1121 13:40:29.391414 4721 scope.go:117] "RemoveContainer" containerID="cd57630b5d00a5a053d83fb86670134ef33785029e4deb7a560b30967ed7995c" Nov 21 13:40:29 crc kubenswrapper[4721]: E1121 13:40:29.391678 4721 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"cd57630b5d00a5a053d83fb86670134ef33785029e4deb7a560b30967ed7995c\": container with ID starting with cd57630b5d00a5a053d83fb86670134ef33785029e4deb7a560b30967ed7995c not found: ID does not exist" containerID="cd57630b5d00a5a053d83fb86670134ef33785029e4deb7a560b30967ed7995c" Nov 21 13:40:29 crc kubenswrapper[4721]: I1121 13:40:29.391700 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd57630b5d00a5a053d83fb86670134ef33785029e4deb7a560b30967ed7995c"} err="failed to get container status \"cd57630b5d00a5a053d83fb86670134ef33785029e4deb7a560b30967ed7995c\": rpc error: code = NotFound desc = could not find container \"cd57630b5d00a5a053d83fb86670134ef33785029e4deb7a560b30967ed7995c\": container with ID starting with cd57630b5d00a5a053d83fb86670134ef33785029e4deb7a560b30967ed7995c not found: ID does not exist" Nov 21 13:40:30 crc kubenswrapper[4721]: I1121 13:40:30.989370 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0026c9c-bbcc-4111-bb3d-42667e381763" path="/var/lib/kubelet/pods/e0026c9c-bbcc-4111-bb3d-42667e381763/volumes" Nov 21 13:40:46 crc kubenswrapper[4721]: I1121 13:40:46.491847 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" podUID="022a1886-afb6-49ca-801b-2a57240223bf" containerName="oauth-openshift" containerID="cri-o://44b41c4d5171d98e8966337fae6844f89345319db26dbd5c9fc04b23352de921" gracePeriod=15 Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.427682 4721 generic.go:334] "Generic (PLEG): container finished" podID="022a1886-afb6-49ca-801b-2a57240223bf" containerID="44b41c4d5171d98e8966337fae6844f89345319db26dbd5c9fc04b23352de921" exitCode=0 Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.427790 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" event={"ID":"022a1886-afb6-49ca-801b-2a57240223bf","Type":"ContainerDied","Data":"44b41c4d5171d98e8966337fae6844f89345319db26dbd5c9fc04b23352de921"} Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.428006 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" event={"ID":"022a1886-afb6-49ca-801b-2a57240223bf","Type":"ContainerDied","Data":"bc00ff42ef23cfe330d3a347c64de0699abda6524f0e98a9aaca661ccf753e2f"} Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.428036 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc00ff42ef23cfe330d3a347c64de0699abda6524f0e98a9aaca661ccf753e2f" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.433972 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492340 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-f9d58f4c-7jtf2"] Nov 21 13:40:47 crc kubenswrapper[4721]: E1121 13:40:47.492540 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="022a1886-afb6-49ca-801b-2a57240223bf" containerName="oauth-openshift" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492554 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="022a1886-afb6-49ca-801b-2a57240223bf" containerName="oauth-openshift" Nov 21 13:40:47 crc kubenswrapper[4721]: E1121 13:40:47.492564 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f68146a4-d5d0-41b3-b55a-53fba1ed4a31" containerName="extract-utilities" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492572 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f68146a4-d5d0-41b3-b55a-53fba1ed4a31" containerName="extract-utilities" Nov 21 13:40:47 crc kubenswrapper[4721]: E1121 13:40:47.492590 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3fcd3bc-f437-4602-b092-0c9155559334" containerName="extract-utilities" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492599 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3fcd3bc-f437-4602-b092-0c9155559334" containerName="extract-utilities" Nov 21 13:40:47 crc kubenswrapper[4721]: E1121 13:40:47.492608 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89ba540a-2435-4633-9e07-97311b914cbd" containerName="extract-utilities" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492616 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="89ba540a-2435-4633-9e07-97311b914cbd" containerName="extract-utilities" Nov 21 13:40:47 crc kubenswrapper[4721]: E1121 13:40:47.492626 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0026c9c-bbcc-4111-bb3d-42667e381763" containerName="registry-server" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492634 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0026c9c-bbcc-4111-bb3d-42667e381763" containerName="registry-server" Nov 21 13:40:47 crc kubenswrapper[4721]: E1121 13:40:47.492648 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f68146a4-d5d0-41b3-b55a-53fba1ed4a31" containerName="registry-server" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492655 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f68146a4-d5d0-41b3-b55a-53fba1ed4a31" containerName="registry-server" Nov 21 13:40:47 crc kubenswrapper[4721]: E1121 13:40:47.492663 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3fcd3bc-f437-4602-b092-0c9155559334" containerName="extract-content" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492671 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3fcd3bc-f437-4602-b092-0c9155559334" containerName="extract-content" Nov 21 13:40:47 crc kubenswrapper[4721]: E1121 13:40:47.492678 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3fcd3bc-f437-4602-b092-0c9155559334" containerName="registry-server" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492686 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3fcd3bc-f437-4602-b092-0c9155559334" containerName="registry-server" Nov 21 13:40:47 crc kubenswrapper[4721]: E1121 13:40:47.492701 4721 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="89ba540a-2435-4633-9e07-97311b914cbd" containerName="extract-content" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492709 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="89ba540a-2435-4633-9e07-97311b914cbd" containerName="extract-content" Nov 21 13:40:47 crc kubenswrapper[4721]: E1121 13:40:47.492720 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61d80e6e-9ee4-48b1-81d3-a995d6cc9049" containerName="pruner" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492728 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="61d80e6e-9ee4-48b1-81d3-a995d6cc9049" containerName="pruner" Nov 21 13:40:47 crc kubenswrapper[4721]: E1121 13:40:47.492743 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89ba540a-2435-4633-9e07-97311b914cbd" containerName="registry-server" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492752 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="89ba540a-2435-4633-9e07-97311b914cbd" containerName="registry-server" Nov 21 13:40:47 crc kubenswrapper[4721]: E1121 13:40:47.492764 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0026c9c-bbcc-4111-bb3d-42667e381763" containerName="extract-utilities" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492772 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0026c9c-bbcc-4111-bb3d-42667e381763" containerName="extract-utilities" Nov 21 13:40:47 crc kubenswrapper[4721]: E1121 13:40:47.492781 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0026c9c-bbcc-4111-bb3d-42667e381763" containerName="extract-content" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492789 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0026c9c-bbcc-4111-bb3d-42667e381763" containerName="extract-content" Nov 21 13:40:47 crc kubenswrapper[4721]: E1121 13:40:47.492799 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f68146a4-d5d0-41b3-b55a-53fba1ed4a31" containerName="extract-content" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492807 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f68146a4-d5d0-41b3-b55a-53fba1ed4a31" containerName="extract-content" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492907 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="89ba540a-2435-4633-9e07-97311b914cbd" containerName="registry-server" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492921 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="61d80e6e-9ee4-48b1-81d3-a995d6cc9049" containerName="pruner" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492930 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3fcd3bc-f437-4602-b092-0c9155559334" containerName="registry-server" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492945 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0026c9c-bbcc-4111-bb3d-42667e381763" containerName="registry-server" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492957 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f68146a4-d5d0-41b3-b55a-53fba1ed4a31" containerName="registry-server" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.492968 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="022a1886-afb6-49ca-801b-2a57240223bf" containerName="oauth-openshift" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.493404 
4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.518807 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-f9d58f4c-7jtf2"] Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.582931 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-service-ca\") pod \"022a1886-afb6-49ca-801b-2a57240223bf\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.582984 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-template-provider-selection\") pod \"022a1886-afb6-49ca-801b-2a57240223bf\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.583019 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-audit-policies\") pod \"022a1886-afb6-49ca-801b-2a57240223bf\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.583089 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-template-login\") pod \"022a1886-afb6-49ca-801b-2a57240223bf\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.583120 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-ocp-branding-template\") pod \"022a1886-afb6-49ca-801b-2a57240223bf\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.583144 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-idp-0-file-data\") pod \"022a1886-afb6-49ca-801b-2a57240223bf\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.583176 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-cliconfig\") pod \"022a1886-afb6-49ca-801b-2a57240223bf\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.583202 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttrp9\" (UniqueName: \"kubernetes.io/projected/022a1886-afb6-49ca-801b-2a57240223bf-kube-api-access-ttrp9\") pod \"022a1886-afb6-49ca-801b-2a57240223bf\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.583229 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-router-certs\") pod \"022a1886-afb6-49ca-801b-2a57240223bf\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.583248 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-serving-cert\") pod \"022a1886-afb6-49ca-801b-2a57240223bf\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.583287 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-session\") pod \"022a1886-afb6-49ca-801b-2a57240223bf\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.583314 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-trusted-ca-bundle\") pod \"022a1886-afb6-49ca-801b-2a57240223bf\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.583417 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/022a1886-afb6-49ca-801b-2a57240223bf-audit-dir\") pod \"022a1886-afb6-49ca-801b-2a57240223bf\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.584215 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "022a1886-afb6-49ca-801b-2a57240223bf" (UID: "022a1886-afb6-49ca-801b-2a57240223bf"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.584402 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "022a1886-afb6-49ca-801b-2a57240223bf" (UID: "022a1886-afb6-49ca-801b-2a57240223bf"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.584597 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "022a1886-afb6-49ca-801b-2a57240223bf" (UID: "022a1886-afb6-49ca-801b-2a57240223bf"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.584823 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/022a1886-afb6-49ca-801b-2a57240223bf-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "022a1886-afb6-49ca-801b-2a57240223bf" (UID: "022a1886-afb6-49ca-801b-2a57240223bf"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.584878 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "022a1886-afb6-49ca-801b-2a57240223bf" (UID: "022a1886-afb6-49ca-801b-2a57240223bf"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.584914 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-template-error\") pod \"022a1886-afb6-49ca-801b-2a57240223bf\" (UID: \"022a1886-afb6-49ca-801b-2a57240223bf\") " Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.585515 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.585539 4721 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/022a1886-afb6-49ca-801b-2a57240223bf-audit-dir\") on node \"crc\" DevicePath \"\"" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.585553 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.585565 4721 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.585577 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.589304 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "022a1886-afb6-49ca-801b-2a57240223bf" (UID: "022a1886-afb6-49ca-801b-2a57240223bf"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.589756 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/022a1886-afb6-49ca-801b-2a57240223bf-kube-api-access-ttrp9" (OuterVolumeSpecName: "kube-api-access-ttrp9") pod "022a1886-afb6-49ca-801b-2a57240223bf" (UID: "022a1886-afb6-49ca-801b-2a57240223bf"). InnerVolumeSpecName "kube-api-access-ttrp9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.589887 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "022a1886-afb6-49ca-801b-2a57240223bf" (UID: "022a1886-afb6-49ca-801b-2a57240223bf"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.590028 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "022a1886-afb6-49ca-801b-2a57240223bf" (UID: "022a1886-afb6-49ca-801b-2a57240223bf"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.591533 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "022a1886-afb6-49ca-801b-2a57240223bf" (UID: "022a1886-afb6-49ca-801b-2a57240223bf"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.591808 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "022a1886-afb6-49ca-801b-2a57240223bf" (UID: "022a1886-afb6-49ca-801b-2a57240223bf"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.592089 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "022a1886-afb6-49ca-801b-2a57240223bf" (UID: "022a1886-afb6-49ca-801b-2a57240223bf"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.592759 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "022a1886-afb6-49ca-801b-2a57240223bf" (UID: "022a1886-afb6-49ca-801b-2a57240223bf"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.592907 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "022a1886-afb6-49ca-801b-2a57240223bf" (UID: "022a1886-afb6-49ca-801b-2a57240223bf"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.687388 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.687479 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-cliconfig\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.687576 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b9c2672f-9b78-46af-abc3-a15bbc40971d-audit-policies\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.687636 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-session\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.687697 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-user-template-error\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.687748 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.687804 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-router-certs\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.687883 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.687996 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fttp6\" (UniqueName: \"kubernetes.io/projected/b9c2672f-9b78-46af-abc3-a15bbc40971d-kube-api-access-fttp6\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.688206 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-user-template-login\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.688323 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b9c2672f-9b78-46af-abc3-a15bbc40971d-audit-dir\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.688423 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-service-ca\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.688492 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-serving-cert\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.688620 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.688765 4721 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.688845 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.688885 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.688918 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.688948 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.688975 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.689004 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttrp9\" (UniqueName: \"kubernetes.io/projected/022a1886-afb6-49ca-801b-2a57240223bf-kube-api-access-ttrp9\") on node \"crc\" DevicePath \"\"" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.689031 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.689057 4721 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/022a1886-afb6-49ca-801b-2a57240223bf-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.790654 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b9c2672f-9b78-46af-abc3-a15bbc40971d-audit-policies\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.790734 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-session\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.790787 4721 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-user-template-error\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.790822 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.790862 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-router-certs\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.790903 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.790951 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fttp6\" (UniqueName: \"kubernetes.io/projected/b9c2672f-9b78-46af-abc3-a15bbc40971d-kube-api-access-fttp6\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.790993 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-user-template-login\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.791037 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b9c2672f-9b78-46af-abc3-a15bbc40971d-audit-dir\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.791108 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-service-ca\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.791145 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" 
(UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-serving-cert\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.791195 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.791229 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.791304 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-cliconfig\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.792109 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b9c2672f-9b78-46af-abc3-a15bbc40971d-audit-policies\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.792568 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-cliconfig\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.792869 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-service-ca\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.793570 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b9c2672f-9b78-46af-abc3-a15bbc40971d-audit-dir\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.795502 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-trusted-ca-bundle\") pod 
\"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.795933 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-router-certs\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.796183 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.797518 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.798874 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-session\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.799349 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-serving-cert\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.808682 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.809145 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-user-template-login\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.809346 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b9c2672f-9b78-46af-abc3-a15bbc40971d-v4-0-config-user-template-error\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: 
\"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.819376 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fttp6\" (UniqueName: \"kubernetes.io/projected/b9c2672f-9b78-46af-abc3-a15bbc40971d-kube-api-access-fttp6\") pod \"oauth-openshift-f9d58f4c-7jtf2\" (UID: \"b9c2672f-9b78-46af-abc3-a15bbc40971d\") " pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:47 crc kubenswrapper[4721]: I1121 13:40:47.827273 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:48 crc kubenswrapper[4721]: I1121 13:40:48.289222 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-f9d58f4c-7jtf2"] Nov 21 13:40:48 crc kubenswrapper[4721]: I1121 13:40:48.438604 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-fnzlv" Nov 21 13:40:48 crc kubenswrapper[4721]: I1121 13:40:48.438610 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" event={"ID":"b9c2672f-9b78-46af-abc3-a15bbc40971d","Type":"ContainerStarted","Data":"be152087e6a26db4956bf8b39ecd40573fdb55b737b73829a433802700ea23a0"} Nov 21 13:40:48 crc kubenswrapper[4721]: I1121 13:40:48.483817 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-fnzlv"] Nov 21 13:40:48 crc kubenswrapper[4721]: I1121 13:40:48.487083 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-fnzlv"] Nov 21 13:40:48 crc kubenswrapper[4721]: I1121 13:40:48.988436 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="022a1886-afb6-49ca-801b-2a57240223bf" path="/var/lib/kubelet/pods/022a1886-afb6-49ca-801b-2a57240223bf/volumes" Nov 21 13:40:49 crc kubenswrapper[4721]: I1121 13:40:49.445720 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" event={"ID":"b9c2672f-9b78-46af-abc3-a15bbc40971d","Type":"ContainerStarted","Data":"ac83dce10969b686cbdb0f79b12ad751723d9a53730c1d1d63981305e78e4b2e"} Nov 21 13:40:49 crc kubenswrapper[4721]: I1121 13:40:49.447861 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:40:49 crc kubenswrapper[4721]: I1121 13:40:49.470296 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" podStartSLOduration=28.470273285 podStartE2EDuration="28.470273285s" podCreationTimestamp="2025-11-21 13:40:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:40:49.468106416 +0000 UTC m=+287.445997694" watchObservedRunningTime="2025-11-21 13:40:49.470273285 +0000 UTC m=+287.448164543" Nov 21 13:40:49 crc kubenswrapper[4721]: I1121 13:40:49.503009 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-f9d58f4c-7jtf2" Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.455542 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l99jz"] Nov 21 
13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.456989 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-l99jz" podUID="f22ea096-32f4-4972-a8c7-f43072dc8eef" containerName="registry-server" containerID="cri-o://4b457bfc24eff2de12b608d0f1537dadf454035a50081fd9806566d63cfe8eff" gracePeriod=30 Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.468209 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hplx9"] Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.468531 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hplx9" podUID="136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2" containerName="registry-server" containerID="cri-o://fa8c4d886cc87e4e3f48137cabc820d523dc5944c4661df85b91115ebcd013dc" gracePeriod=30 Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.484595 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-78vlb"] Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.484973 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-78vlb" podUID="682dcfaa-ba18-421a-bbd2-4c29d22e2373" containerName="marketplace-operator" containerID="cri-o://83282bec31a0e925fcb4bdb2a42e8cfcbcce89c5fa175be555b298b81caee165" gracePeriod=30 Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.503833 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tfkj9"] Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.504150 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tfkj9" podUID="607645c8-0edd-40d0-959a-5cd14dd67a56" containerName="registry-server" containerID="cri-o://cdbb9fbed3aa41598ac56fee5e67220a348f7472681e824e6285d6135e517312" gracePeriod=30 Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.507428 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pzht5"] Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.508123 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pzht5" Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.512962 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z55jt"] Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.513236 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-z55jt" podUID="0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317" containerName="registry-server" containerID="cri-o://b7af1e767118e18b1d5d365ff50155588935c8a32b5cbfac4740c1f70435200a" gracePeriod=30 Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.518844 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pzht5"] Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.682745 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pzht5\" (UID: \"b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa\") " pod="openshift-marketplace/marketplace-operator-79b997595-pzht5" Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.682782 4721 generic.go:334] "Generic (PLEG): container finished" podID="682dcfaa-ba18-421a-bbd2-4c29d22e2373" containerID="83282bec31a0e925fcb4bdb2a42e8cfcbcce89c5fa175be555b298b81caee165" exitCode=0 Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.682817 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pzht5\" (UID: \"b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa\") " pod="openshift-marketplace/marketplace-operator-79b997595-pzht5" Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.682844 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mb69\" (UniqueName: \"kubernetes.io/projected/b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa-kube-api-access-6mb69\") pod \"marketplace-operator-79b997595-pzht5\" (UID: \"b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa\") " pod="openshift-marketplace/marketplace-operator-79b997595-pzht5" Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.682890 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-78vlb" event={"ID":"682dcfaa-ba18-421a-bbd2-4c29d22e2373","Type":"ContainerDied","Data":"83282bec31a0e925fcb4bdb2a42e8cfcbcce89c5fa175be555b298b81caee165"} Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.689262 4721 generic.go:334] "Generic (PLEG): container finished" podID="0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317" containerID="b7af1e767118e18b1d5d365ff50155588935c8a32b5cbfac4740c1f70435200a" exitCode=0 Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.689313 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z55jt" event={"ID":"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317","Type":"ContainerDied","Data":"b7af1e767118e18b1d5d365ff50155588935c8a32b5cbfac4740c1f70435200a"} Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.699144 4721 generic.go:334] "Generic (PLEG): container finished" podID="607645c8-0edd-40d0-959a-5cd14dd67a56" 
containerID="cdbb9fbed3aa41598ac56fee5e67220a348f7472681e824e6285d6135e517312" exitCode=0 Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.699200 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tfkj9" event={"ID":"607645c8-0edd-40d0-959a-5cd14dd67a56","Type":"ContainerDied","Data":"cdbb9fbed3aa41598ac56fee5e67220a348f7472681e824e6285d6135e517312"} Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.706925 4721 generic.go:334] "Generic (PLEG): container finished" podID="f22ea096-32f4-4972-a8c7-f43072dc8eef" containerID="4b457bfc24eff2de12b608d0f1537dadf454035a50081fd9806566d63cfe8eff" exitCode=0 Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.706971 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l99jz" event={"ID":"f22ea096-32f4-4972-a8c7-f43072dc8eef","Type":"ContainerDied","Data":"4b457bfc24eff2de12b608d0f1537dadf454035a50081fd9806566d63cfe8eff"} Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.714299 4721 generic.go:334] "Generic (PLEG): container finished" podID="136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2" containerID="fa8c4d886cc87e4e3f48137cabc820d523dc5944c4661df85b91115ebcd013dc" exitCode=0 Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.714329 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hplx9" event={"ID":"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2","Type":"ContainerDied","Data":"fa8c4d886cc87e4e3f48137cabc820d523dc5944c4661df85b91115ebcd013dc"} Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.783765 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pzht5\" (UID: \"b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa\") " pod="openshift-marketplace/marketplace-operator-79b997595-pzht5" Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.784150 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mb69\" (UniqueName: \"kubernetes.io/projected/b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa-kube-api-access-6mb69\") pod \"marketplace-operator-79b997595-pzht5\" (UID: \"b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa\") " pod="openshift-marketplace/marketplace-operator-79b997595-pzht5" Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.784250 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pzht5\" (UID: \"b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa\") " pod="openshift-marketplace/marketplace-operator-79b997595-pzht5" Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.785901 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pzht5\" (UID: \"b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa\") " pod="openshift-marketplace/marketplace-operator-79b997595-pzht5" Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.804828 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pzht5\" (UID: \"b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa\") " pod="openshift-marketplace/marketplace-operator-79b997595-pzht5" Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.805620 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mb69\" (UniqueName: \"kubernetes.io/projected/b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa-kube-api-access-6mb69\") pod \"marketplace-operator-79b997595-pzht5\" (UID: \"b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa\") " pod="openshift-marketplace/marketplace-operator-79b997595-pzht5" Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.848363 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pzht5" Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.916128 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hplx9" Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.921111 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z55jt" Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.926652 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tfkj9" Nov 21 13:41:22 crc kubenswrapper[4721]: I1121 13:41:22.954894 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-78vlb" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.088256 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2-catalog-content\") pod \"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2\" (UID: \"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2\") " Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.088316 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317-catalog-content\") pod \"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317\" (UID: \"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317\") " Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.088348 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzxjz\" (UniqueName: \"kubernetes.io/projected/682dcfaa-ba18-421a-bbd2-4c29d22e2373-kube-api-access-nzxjz\") pod \"682dcfaa-ba18-421a-bbd2-4c29d22e2373\" (UID: \"682dcfaa-ba18-421a-bbd2-4c29d22e2373\") " Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.088378 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/682dcfaa-ba18-421a-bbd2-4c29d22e2373-marketplace-trusted-ca\") pod \"682dcfaa-ba18-421a-bbd2-4c29d22e2373\" (UID: \"682dcfaa-ba18-421a-bbd2-4c29d22e2373\") " Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.088396 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctvz6\" (UniqueName: \"kubernetes.io/projected/136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2-kube-api-access-ctvz6\") pod \"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2\" (UID: \"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2\") " Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 
13:41:23.088416 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/682dcfaa-ba18-421a-bbd2-4c29d22e2373-marketplace-operator-metrics\") pod \"682dcfaa-ba18-421a-bbd2-4c29d22e2373\" (UID: \"682dcfaa-ba18-421a-bbd2-4c29d22e2373\") " Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.088439 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzvcq\" (UniqueName: \"kubernetes.io/projected/607645c8-0edd-40d0-959a-5cd14dd67a56-kube-api-access-zzvcq\") pod \"607645c8-0edd-40d0-959a-5cd14dd67a56\" (UID: \"607645c8-0edd-40d0-959a-5cd14dd67a56\") " Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.088469 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p5m99\" (UniqueName: \"kubernetes.io/projected/0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317-kube-api-access-p5m99\") pod \"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317\" (UID: \"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317\") " Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.088524 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2-utilities\") pod \"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2\" (UID: \"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2\") " Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.088559 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317-utilities\") pod \"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317\" (UID: \"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317\") " Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.088578 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/607645c8-0edd-40d0-959a-5cd14dd67a56-utilities\") pod \"607645c8-0edd-40d0-959a-5cd14dd67a56\" (UID: \"607645c8-0edd-40d0-959a-5cd14dd67a56\") " Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.088594 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/607645c8-0edd-40d0-959a-5cd14dd67a56-catalog-content\") pod \"607645c8-0edd-40d0-959a-5cd14dd67a56\" (UID: \"607645c8-0edd-40d0-959a-5cd14dd67a56\") " Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.089917 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/682dcfaa-ba18-421a-bbd2-4c29d22e2373-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "682dcfaa-ba18-421a-bbd2-4c29d22e2373" (UID: "682dcfaa-ba18-421a-bbd2-4c29d22e2373"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.089955 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2-utilities" (OuterVolumeSpecName: "utilities") pod "136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2" (UID: "136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.090883 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/607645c8-0edd-40d0-959a-5cd14dd67a56-utilities" (OuterVolumeSpecName: "utilities") pod "607645c8-0edd-40d0-959a-5cd14dd67a56" (UID: "607645c8-0edd-40d0-959a-5cd14dd67a56"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.092550 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/682dcfaa-ba18-421a-bbd2-4c29d22e2373-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "682dcfaa-ba18-421a-bbd2-4c29d22e2373" (UID: "682dcfaa-ba18-421a-bbd2-4c29d22e2373"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.092660 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317-utilities" (OuterVolumeSpecName: "utilities") pod "0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317" (UID: "0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.092818 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/682dcfaa-ba18-421a-bbd2-4c29d22e2373-kube-api-access-nzxjz" (OuterVolumeSpecName: "kube-api-access-nzxjz") pod "682dcfaa-ba18-421a-bbd2-4c29d22e2373" (UID: "682dcfaa-ba18-421a-bbd2-4c29d22e2373"). InnerVolumeSpecName "kube-api-access-nzxjz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.092909 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317-kube-api-access-p5m99" (OuterVolumeSpecName: "kube-api-access-p5m99") pod "0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317" (UID: "0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317"). InnerVolumeSpecName "kube-api-access-p5m99". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.092860 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/607645c8-0edd-40d0-959a-5cd14dd67a56-kube-api-access-zzvcq" (OuterVolumeSpecName: "kube-api-access-zzvcq") pod "607645c8-0edd-40d0-959a-5cd14dd67a56" (UID: "607645c8-0edd-40d0-959a-5cd14dd67a56"). InnerVolumeSpecName "kube-api-access-zzvcq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.096829 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2-kube-api-access-ctvz6" (OuterVolumeSpecName: "kube-api-access-ctvz6") pod "136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2" (UID: "136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2"). InnerVolumeSpecName "kube-api-access-ctvz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.117186 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/607645c8-0edd-40d0-959a-5cd14dd67a56-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "607645c8-0edd-40d0-959a-5cd14dd67a56" (UID: "607645c8-0edd-40d0-959a-5cd14dd67a56"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.140432 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2" (UID: "136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.190532 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzxjz\" (UniqueName: \"kubernetes.io/projected/682dcfaa-ba18-421a-bbd2-4c29d22e2373-kube-api-access-nzxjz\") on node \"crc\" DevicePath \"\"" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.190569 4721 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/682dcfaa-ba18-421a-bbd2-4c29d22e2373-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.190578 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctvz6\" (UniqueName: \"kubernetes.io/projected/136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2-kube-api-access-ctvz6\") on node \"crc\" DevicePath \"\"" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.190588 4721 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/682dcfaa-ba18-421a-bbd2-4c29d22e2373-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.190600 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzvcq\" (UniqueName: \"kubernetes.io/projected/607645c8-0edd-40d0-959a-5cd14dd67a56-kube-api-access-zzvcq\") on node \"crc\" DevicePath \"\"" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.190611 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p5m99\" (UniqueName: \"kubernetes.io/projected/0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317-kube-api-access-p5m99\") on node \"crc\" DevicePath \"\"" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.190622 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.190631 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.190638 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/607645c8-0edd-40d0-959a-5cd14dd67a56-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.190646 4721 reconciler_common.go:293] "Volume detached for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/607645c8-0edd-40d0-959a-5cd14dd67a56-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.190655 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.215357 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317" (UID: "0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.280815 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l99jz" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.284673 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pzht5"] Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.291934 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.392898 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cz7nf\" (UniqueName: \"kubernetes.io/projected/f22ea096-32f4-4972-a8c7-f43072dc8eef-kube-api-access-cz7nf\") pod \"f22ea096-32f4-4972-a8c7-f43072dc8eef\" (UID: \"f22ea096-32f4-4972-a8c7-f43072dc8eef\") " Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.393316 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f22ea096-32f4-4972-a8c7-f43072dc8eef-catalog-content\") pod \"f22ea096-32f4-4972-a8c7-f43072dc8eef\" (UID: \"f22ea096-32f4-4972-a8c7-f43072dc8eef\") " Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.393351 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f22ea096-32f4-4972-a8c7-f43072dc8eef-utilities\") pod \"f22ea096-32f4-4972-a8c7-f43072dc8eef\" (UID: \"f22ea096-32f4-4972-a8c7-f43072dc8eef\") " Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.394393 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f22ea096-32f4-4972-a8c7-f43072dc8eef-utilities" (OuterVolumeSpecName: "utilities") pod "f22ea096-32f4-4972-a8c7-f43072dc8eef" (UID: "f22ea096-32f4-4972-a8c7-f43072dc8eef"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.395625 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f22ea096-32f4-4972-a8c7-f43072dc8eef-kube-api-access-cz7nf" (OuterVolumeSpecName: "kube-api-access-cz7nf") pod "f22ea096-32f4-4972-a8c7-f43072dc8eef" (UID: "f22ea096-32f4-4972-a8c7-f43072dc8eef"). InnerVolumeSpecName "kube-api-access-cz7nf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.453270 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f22ea096-32f4-4972-a8c7-f43072dc8eef-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f22ea096-32f4-4972-a8c7-f43072dc8eef" (UID: "f22ea096-32f4-4972-a8c7-f43072dc8eef"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.495030 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cz7nf\" (UniqueName: \"kubernetes.io/projected/f22ea096-32f4-4972-a8c7-f43072dc8eef-kube-api-access-cz7nf\") on node \"crc\" DevicePath \"\"" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.495100 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f22ea096-32f4-4972-a8c7-f43072dc8eef-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.495114 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f22ea096-32f4-4972-a8c7-f43072dc8eef-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.721171 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hplx9" event={"ID":"136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2","Type":"ContainerDied","Data":"2fee2cbb68ddc91b648c8027f0fba55b0846baf9835b12ac358033492de763ca"} Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.721221 4721 scope.go:117] "RemoveContainer" containerID="fa8c4d886cc87e4e3f48137cabc820d523dc5944c4661df85b91115ebcd013dc" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.721308 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hplx9" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.727143 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pzht5" event={"ID":"b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa","Type":"ContainerStarted","Data":"67c78e977835fd7df4a0be7dd32d157581f73a73428d3f420efe697d4d70227e"} Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.727186 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pzht5" event={"ID":"b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa","Type":"ContainerStarted","Data":"d40eb75639947492ed6773433e9546dd1b13958398fc46bc763b30cb23c7f659"} Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.729402 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-78vlb" event={"ID":"682dcfaa-ba18-421a-bbd2-4c29d22e2373","Type":"ContainerDied","Data":"fee435f27623936b652b5da3307bdb7eba25dcaa58a5ad65e20426e9087792d3"} Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.729465 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-78vlb" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.733457 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z55jt" event={"ID":"0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317","Type":"ContainerDied","Data":"ef78d9f2f5fc55abecfaeb39a15473952f171bfcdcdc4291e3aa4c412e980f7b"} Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.733490 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z55jt" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.737078 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tfkj9" event={"ID":"607645c8-0edd-40d0-959a-5cd14dd67a56","Type":"ContainerDied","Data":"def5e761496f9fd41a686baedf63e7f7ad00f288f6179fab5ffa7027218a7b29"} Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.737136 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tfkj9" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.738476 4721 scope.go:117] "RemoveContainer" containerID="e1b12b392ab2f2f5a98e54de33584ccd9fc6d6ba853472869985d652123afba8" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.741628 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l99jz" event={"ID":"f22ea096-32f4-4972-a8c7-f43072dc8eef","Type":"ContainerDied","Data":"66122afeed6d325ce874fe76545eacbd68c8e86217ef17db7f34af950cae8fea"} Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.741706 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l99jz" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.755474 4721 scope.go:117] "RemoveContainer" containerID="9422ebbca8b7aa35809f2cf38bd928d435baefe44175138e8af5dd96d79c7d6a" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.779162 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hplx9"] Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.784534 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hplx9"] Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.797306 4721 scope.go:117] "RemoveContainer" containerID="83282bec31a0e925fcb4bdb2a42e8cfcbcce89c5fa175be555b298b81caee165" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.825574 4721 scope.go:117] "RemoveContainer" containerID="b7af1e767118e18b1d5d365ff50155588935c8a32b5cbfac4740c1f70435200a" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.843096 4721 scope.go:117] "RemoveContainer" containerID="376201416b523b3c26a09ed37cb286b05828809b71aab7e1153c1d7ba3a7b5cb" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.847901 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-78vlb"] Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.849667 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-78vlb"] Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.856493 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l99jz"] Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.859810 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-marketplace/certified-operators-l99jz"] Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.860536 4721 scope.go:117] "RemoveContainer" containerID="088d8a88c86bf03b057eceb89eb1433b22103043a98d556db7c537a39242f191" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.874728 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tfkj9"] Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.879296 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tfkj9"] Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.892424 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z55jt"] Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.894116 4721 scope.go:117] "RemoveContainer" containerID="cdbb9fbed3aa41598ac56fee5e67220a348f7472681e824e6285d6135e517312" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.895087 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-z55jt"] Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.904792 4721 scope.go:117] "RemoveContainer" containerID="7d650514d49a9863350acc58685135d573d12d86e131c35d4858dc009d0b00e1" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.916077 4721 scope.go:117] "RemoveContainer" containerID="b4a84b6142d98a6dae82e16e8917102950cbd82e0e07ed33fc84b274fa611459" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.927420 4721 scope.go:117] "RemoveContainer" containerID="4b457bfc24eff2de12b608d0f1537dadf454035a50081fd9806566d63cfe8eff" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.940234 4721 scope.go:117] "RemoveContainer" containerID="be139ac75cf57654704aa0d6f905ad1b2644256027564b56e4d36ab06a49effa" Nov 21 13:41:23 crc kubenswrapper[4721]: I1121 13:41:23.953364 4721 scope.go:117] "RemoveContainer" containerID="595b8d1069ffe31829a2bdebbcd67f2c0299151d4a956a3c0fe3434eb61490f0" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.689326 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5fg2s"] Nov 21 13:41:24 crc kubenswrapper[4721]: E1121 13:41:24.689606 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2" containerName="extract-content" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.689626 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2" containerName="extract-content" Nov 21 13:41:24 crc kubenswrapper[4721]: E1121 13:41:24.689647 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317" containerName="extract-utilities" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.689660 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317" containerName="extract-utilities" Nov 21 13:41:24 crc kubenswrapper[4721]: E1121 13:41:24.689676 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f22ea096-32f4-4972-a8c7-f43072dc8eef" containerName="extract-utilities" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.689689 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f22ea096-32f4-4972-a8c7-f43072dc8eef" containerName="extract-utilities" Nov 21 13:41:24 crc kubenswrapper[4721]: E1121 13:41:24.689707 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f22ea096-32f4-4972-a8c7-f43072dc8eef" 
containerName="extract-content" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.689720 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f22ea096-32f4-4972-a8c7-f43072dc8eef" containerName="extract-content" Nov 21 13:41:24 crc kubenswrapper[4721]: E1121 13:41:24.689738 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="682dcfaa-ba18-421a-bbd2-4c29d22e2373" containerName="marketplace-operator" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.689750 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="682dcfaa-ba18-421a-bbd2-4c29d22e2373" containerName="marketplace-operator" Nov 21 13:41:24 crc kubenswrapper[4721]: E1121 13:41:24.689772 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2" containerName="registry-server" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.689787 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2" containerName="registry-server" Nov 21 13:41:24 crc kubenswrapper[4721]: E1121 13:41:24.689843 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317" containerName="registry-server" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.689860 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317" containerName="registry-server" Nov 21 13:41:24 crc kubenswrapper[4721]: E1121 13:41:24.689880 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2" containerName="extract-utilities" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.689895 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2" containerName="extract-utilities" Nov 21 13:41:24 crc kubenswrapper[4721]: E1121 13:41:24.689913 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f22ea096-32f4-4972-a8c7-f43072dc8eef" containerName="registry-server" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.689925 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="f22ea096-32f4-4972-a8c7-f43072dc8eef" containerName="registry-server" Nov 21 13:41:24 crc kubenswrapper[4721]: E1121 13:41:24.689943 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="607645c8-0edd-40d0-959a-5cd14dd67a56" containerName="registry-server" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.689955 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="607645c8-0edd-40d0-959a-5cd14dd67a56" containerName="registry-server" Nov 21 13:41:24 crc kubenswrapper[4721]: E1121 13:41:24.689971 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="607645c8-0edd-40d0-959a-5cd14dd67a56" containerName="extract-utilities" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.689982 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="607645c8-0edd-40d0-959a-5cd14dd67a56" containerName="extract-utilities" Nov 21 13:41:24 crc kubenswrapper[4721]: E1121 13:41:24.690001 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317" containerName="extract-content" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.690013 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317" containerName="extract-content" Nov 21 13:41:24 crc kubenswrapper[4721]: E1121 13:41:24.690033 4721 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="607645c8-0edd-40d0-959a-5cd14dd67a56" containerName="extract-content" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.690044 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="607645c8-0edd-40d0-959a-5cd14dd67a56" containerName="extract-content" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.690225 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2" containerName="registry-server" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.690248 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="f22ea096-32f4-4972-a8c7-f43072dc8eef" containerName="registry-server" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.690264 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="607645c8-0edd-40d0-959a-5cd14dd67a56" containerName="registry-server" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.690280 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="682dcfaa-ba18-421a-bbd2-4c29d22e2373" containerName="marketplace-operator" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.690307 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317" containerName="registry-server" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.691544 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5fg2s" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.695226 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.702389 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5fg2s"] Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.747543 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5acaaf53-2bc9-410f-8e00-66627b51e780-utilities\") pod \"redhat-marketplace-5fg2s\" (UID: \"5acaaf53-2bc9-410f-8e00-66627b51e780\") " pod="openshift-marketplace/redhat-marketplace-5fg2s" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.747585 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5acaaf53-2bc9-410f-8e00-66627b51e780-catalog-content\") pod \"redhat-marketplace-5fg2s\" (UID: \"5acaaf53-2bc9-410f-8e00-66627b51e780\") " pod="openshift-marketplace/redhat-marketplace-5fg2s" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.747633 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpv28\" (UniqueName: \"kubernetes.io/projected/5acaaf53-2bc9-410f-8e00-66627b51e780-kube-api-access-vpv28\") pod \"redhat-marketplace-5fg2s\" (UID: \"5acaaf53-2bc9-410f-8e00-66627b51e780\") " pod="openshift-marketplace/redhat-marketplace-5fg2s" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.760293 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-pzht5" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.779358 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-pzht5" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.792660 4721 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-pzht5" podStartSLOduration=2.792640198 podStartE2EDuration="2.792640198s" podCreationTimestamp="2025-11-21 13:41:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:41:24.789573934 +0000 UTC m=+322.767465212" watchObservedRunningTime="2025-11-21 13:41:24.792640198 +0000 UTC m=+322.770531456" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.848701 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5acaaf53-2bc9-410f-8e00-66627b51e780-utilities\") pod \"redhat-marketplace-5fg2s\" (UID: \"5acaaf53-2bc9-410f-8e00-66627b51e780\") " pod="openshift-marketplace/redhat-marketplace-5fg2s" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.849202 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5acaaf53-2bc9-410f-8e00-66627b51e780-catalog-content\") pod \"redhat-marketplace-5fg2s\" (UID: \"5acaaf53-2bc9-410f-8e00-66627b51e780\") " pod="openshift-marketplace/redhat-marketplace-5fg2s" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.849145 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5acaaf53-2bc9-410f-8e00-66627b51e780-utilities\") pod \"redhat-marketplace-5fg2s\" (UID: \"5acaaf53-2bc9-410f-8e00-66627b51e780\") " pod="openshift-marketplace/redhat-marketplace-5fg2s" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.849370 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpv28\" (UniqueName: \"kubernetes.io/projected/5acaaf53-2bc9-410f-8e00-66627b51e780-kube-api-access-vpv28\") pod \"redhat-marketplace-5fg2s\" (UID: \"5acaaf53-2bc9-410f-8e00-66627b51e780\") " pod="openshift-marketplace/redhat-marketplace-5fg2s" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.849960 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5acaaf53-2bc9-410f-8e00-66627b51e780-catalog-content\") pod \"redhat-marketplace-5fg2s\" (UID: \"5acaaf53-2bc9-410f-8e00-66627b51e780\") " pod="openshift-marketplace/redhat-marketplace-5fg2s" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.874159 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpv28\" (UniqueName: \"kubernetes.io/projected/5acaaf53-2bc9-410f-8e00-66627b51e780-kube-api-access-vpv28\") pod \"redhat-marketplace-5fg2s\" (UID: \"5acaaf53-2bc9-410f-8e00-66627b51e780\") " pod="openshift-marketplace/redhat-marketplace-5fg2s" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.880634 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-htjrp"] Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.881779 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-htjrp" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.883761 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.888946 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-htjrp"] Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.950279 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4617f0e6-c704-425f-836b-d0a713769a2a-catalog-content\") pod \"redhat-operators-htjrp\" (UID: \"4617f0e6-c704-425f-836b-d0a713769a2a\") " pod="openshift-marketplace/redhat-operators-htjrp" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.950334 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4617f0e6-c704-425f-836b-d0a713769a2a-utilities\") pod \"redhat-operators-htjrp\" (UID: \"4617f0e6-c704-425f-836b-d0a713769a2a\") " pod="openshift-marketplace/redhat-operators-htjrp" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.950442 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvh6n\" (UniqueName: \"kubernetes.io/projected/4617f0e6-c704-425f-836b-d0a713769a2a-kube-api-access-pvh6n\") pod \"redhat-operators-htjrp\" (UID: \"4617f0e6-c704-425f-836b-d0a713769a2a\") " pod="openshift-marketplace/redhat-operators-htjrp" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.989606 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317" path="/var/lib/kubelet/pods/0eafd92e-ad3e-4a5f-bdb7-0f2f0fc95317/volumes" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.990275 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2" path="/var/lib/kubelet/pods/136f4c7e-7cb7-4c2f-8bec-e7a9d8f844f2/volumes" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.990854 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="607645c8-0edd-40d0-959a-5cd14dd67a56" path="/var/lib/kubelet/pods/607645c8-0edd-40d0-959a-5cd14dd67a56/volumes" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.992039 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="682dcfaa-ba18-421a-bbd2-4c29d22e2373" path="/var/lib/kubelet/pods/682dcfaa-ba18-421a-bbd2-4c29d22e2373/volumes" Nov 21 13:41:24 crc kubenswrapper[4721]: I1121 13:41:24.992477 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f22ea096-32f4-4972-a8c7-f43072dc8eef" path="/var/lib/kubelet/pods/f22ea096-32f4-4972-a8c7-f43072dc8eef/volumes" Nov 21 13:41:25 crc kubenswrapper[4721]: I1121 13:41:25.051248 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4617f0e6-c704-425f-836b-d0a713769a2a-utilities\") pod \"redhat-operators-htjrp\" (UID: \"4617f0e6-c704-425f-836b-d0a713769a2a\") " pod="openshift-marketplace/redhat-operators-htjrp" Nov 21 13:41:25 crc kubenswrapper[4721]: I1121 13:41:25.051343 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvh6n\" (UniqueName: 
\"kubernetes.io/projected/4617f0e6-c704-425f-836b-d0a713769a2a-kube-api-access-pvh6n\") pod \"redhat-operators-htjrp\" (UID: \"4617f0e6-c704-425f-836b-d0a713769a2a\") " pod="openshift-marketplace/redhat-operators-htjrp" Nov 21 13:41:25 crc kubenswrapper[4721]: I1121 13:41:25.051388 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4617f0e6-c704-425f-836b-d0a713769a2a-catalog-content\") pod \"redhat-operators-htjrp\" (UID: \"4617f0e6-c704-425f-836b-d0a713769a2a\") " pod="openshift-marketplace/redhat-operators-htjrp" Nov 21 13:41:25 crc kubenswrapper[4721]: I1121 13:41:25.051865 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4617f0e6-c704-425f-836b-d0a713769a2a-utilities\") pod \"redhat-operators-htjrp\" (UID: \"4617f0e6-c704-425f-836b-d0a713769a2a\") " pod="openshift-marketplace/redhat-operators-htjrp" Nov 21 13:41:25 crc kubenswrapper[4721]: I1121 13:41:25.051896 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4617f0e6-c704-425f-836b-d0a713769a2a-catalog-content\") pod \"redhat-operators-htjrp\" (UID: \"4617f0e6-c704-425f-836b-d0a713769a2a\") " pod="openshift-marketplace/redhat-operators-htjrp" Nov 21 13:41:25 crc kubenswrapper[4721]: I1121 13:41:25.064009 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5fg2s" Nov 21 13:41:25 crc kubenswrapper[4721]: I1121 13:41:25.079375 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvh6n\" (UniqueName: \"kubernetes.io/projected/4617f0e6-c704-425f-836b-d0a713769a2a-kube-api-access-pvh6n\") pod \"redhat-operators-htjrp\" (UID: \"4617f0e6-c704-425f-836b-d0a713769a2a\") " pod="openshift-marketplace/redhat-operators-htjrp" Nov 21 13:41:25 crc kubenswrapper[4721]: I1121 13:41:25.208607 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-htjrp" Nov 21 13:41:25 crc kubenswrapper[4721]: I1121 13:41:25.271537 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5fg2s"] Nov 21 13:41:25 crc kubenswrapper[4721]: W1121 13:41:25.280192 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5acaaf53_2bc9_410f_8e00_66627b51e780.slice/crio-f571f2c5b1065c8d1d8b2394f4b06f50b6dc4bbf1066156b5a9700a8c3385056 WatchSource:0}: Error finding container f571f2c5b1065c8d1d8b2394f4b06f50b6dc4bbf1066156b5a9700a8c3385056: Status 404 returned error can't find the container with id f571f2c5b1065c8d1d8b2394f4b06f50b6dc4bbf1066156b5a9700a8c3385056 Nov 21 13:41:25 crc kubenswrapper[4721]: I1121 13:41:25.404946 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-htjrp"] Nov 21 13:41:25 crc kubenswrapper[4721]: W1121 13:41:25.412885 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4617f0e6_c704_425f_836b_d0a713769a2a.slice/crio-93b49a22b2350190d4cc0e6527be3b051e149143de02919447319bc35bfed88e WatchSource:0}: Error finding container 93b49a22b2350190d4cc0e6527be3b051e149143de02919447319bc35bfed88e: Status 404 returned error can't find the container with id 93b49a22b2350190d4cc0e6527be3b051e149143de02919447319bc35bfed88e Nov 21 13:41:25 crc kubenswrapper[4721]: I1121 13:41:25.765544 4721 generic.go:334] "Generic (PLEG): container finished" podID="5acaaf53-2bc9-410f-8e00-66627b51e780" containerID="65844f6abd6ba8b5a71b90cbf246ca94ddf388dcc038aae9c7bf95553ada5870" exitCode=0 Nov 21 13:41:25 crc kubenswrapper[4721]: I1121 13:41:25.765839 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5fg2s" event={"ID":"5acaaf53-2bc9-410f-8e00-66627b51e780","Type":"ContainerDied","Data":"65844f6abd6ba8b5a71b90cbf246ca94ddf388dcc038aae9c7bf95553ada5870"} Nov 21 13:41:25 crc kubenswrapper[4721]: I1121 13:41:25.765864 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5fg2s" event={"ID":"5acaaf53-2bc9-410f-8e00-66627b51e780","Type":"ContainerStarted","Data":"f571f2c5b1065c8d1d8b2394f4b06f50b6dc4bbf1066156b5a9700a8c3385056"} Nov 21 13:41:25 crc kubenswrapper[4721]: I1121 13:41:25.770217 4721 generic.go:334] "Generic (PLEG): container finished" podID="4617f0e6-c704-425f-836b-d0a713769a2a" containerID="e0b2dfb66c56160c9b0623d1bcbe081b8ab0101e028468ff3c9804ee96a8ed2d" exitCode=0 Nov 21 13:41:25 crc kubenswrapper[4721]: I1121 13:41:25.770747 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-htjrp" event={"ID":"4617f0e6-c704-425f-836b-d0a713769a2a","Type":"ContainerDied","Data":"e0b2dfb66c56160c9b0623d1bcbe081b8ab0101e028468ff3c9804ee96a8ed2d"} Nov 21 13:41:25 crc kubenswrapper[4721]: I1121 13:41:25.770781 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-htjrp" event={"ID":"4617f0e6-c704-425f-836b-d0a713769a2a","Type":"ContainerStarted","Data":"93b49a22b2350190d4cc0e6527be3b051e149143de02919447319bc35bfed88e"} Nov 21 13:41:26 crc kubenswrapper[4721]: I1121 13:41:26.782294 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-htjrp" 
event={"ID":"4617f0e6-c704-425f-836b-d0a713769a2a","Type":"ContainerStarted","Data":"0905a9c4fcad7be80b39bc941666303ed6473713a336c71c7e00e3ebc449e105"} Nov 21 13:41:26 crc kubenswrapper[4721]: I1121 13:41:26.785721 4721 generic.go:334] "Generic (PLEG): container finished" podID="5acaaf53-2bc9-410f-8e00-66627b51e780" containerID="388211a43e0e05b5eebec8db40f5c7ee8dfe0454b205f0bbcfb4d42e7ad970b8" exitCode=0 Nov 21 13:41:26 crc kubenswrapper[4721]: I1121 13:41:26.785813 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5fg2s" event={"ID":"5acaaf53-2bc9-410f-8e00-66627b51e780","Type":"ContainerDied","Data":"388211a43e0e05b5eebec8db40f5c7ee8dfe0454b205f0bbcfb4d42e7ad970b8"} Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.080799 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ds4sj"] Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.082900 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ds4sj" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.094139 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ds4sj"] Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.094510 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.281217 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-phxzd"] Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.283792 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-phxzd" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.286186 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.286713 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3197bb81-e64b-4ece-811a-0deb7c9cdaeb-utilities\") pod \"certified-operators-ds4sj\" (UID: \"3197bb81-e64b-4ece-811a-0deb7c9cdaeb\") " pod="openshift-marketplace/certified-operators-ds4sj" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.286956 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsnb8\" (UniqueName: \"kubernetes.io/projected/3197bb81-e64b-4ece-811a-0deb7c9cdaeb-kube-api-access-nsnb8\") pod \"certified-operators-ds4sj\" (UID: \"3197bb81-e64b-4ece-811a-0deb7c9cdaeb\") " pod="openshift-marketplace/certified-operators-ds4sj" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.287140 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3197bb81-e64b-4ece-811a-0deb7c9cdaeb-catalog-content\") pod \"certified-operators-ds4sj\" (UID: \"3197bb81-e64b-4ece-811a-0deb7c9cdaeb\") " pod="openshift-marketplace/certified-operators-ds4sj" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.295231 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-phxzd"] Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.388299 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/273316c7-1207-4f09-94c4-ed63761df558-catalog-content\") pod \"community-operators-phxzd\" (UID: \"273316c7-1207-4f09-94c4-ed63761df558\") " pod="openshift-marketplace/community-operators-phxzd" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.388958 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3197bb81-e64b-4ece-811a-0deb7c9cdaeb-catalog-content\") pod \"certified-operators-ds4sj\" (UID: \"3197bb81-e64b-4ece-811a-0deb7c9cdaeb\") " pod="openshift-marketplace/certified-operators-ds4sj" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.389000 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/273316c7-1207-4f09-94c4-ed63761df558-utilities\") pod \"community-operators-phxzd\" (UID: \"273316c7-1207-4f09-94c4-ed63761df558\") " pod="openshift-marketplace/community-operators-phxzd" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.389022 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3197bb81-e64b-4ece-811a-0deb7c9cdaeb-utilities\") pod \"certified-operators-ds4sj\" (UID: \"3197bb81-e64b-4ece-811a-0deb7c9cdaeb\") " pod="openshift-marketplace/certified-operators-ds4sj" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.389055 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65c7n\" (UniqueName: \"kubernetes.io/projected/273316c7-1207-4f09-94c4-ed63761df558-kube-api-access-65c7n\") pod \"community-operators-phxzd\" (UID: \"273316c7-1207-4f09-94c4-ed63761df558\") " pod="openshift-marketplace/community-operators-phxzd" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.389106 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsnb8\" (UniqueName: \"kubernetes.io/projected/3197bb81-e64b-4ece-811a-0deb7c9cdaeb-kube-api-access-nsnb8\") pod \"certified-operators-ds4sj\" (UID: \"3197bb81-e64b-4ece-811a-0deb7c9cdaeb\") " pod="openshift-marketplace/certified-operators-ds4sj" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.389476 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3197bb81-e64b-4ece-811a-0deb7c9cdaeb-utilities\") pod \"certified-operators-ds4sj\" (UID: \"3197bb81-e64b-4ece-811a-0deb7c9cdaeb\") " pod="openshift-marketplace/certified-operators-ds4sj" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.389639 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3197bb81-e64b-4ece-811a-0deb7c9cdaeb-catalog-content\") pod \"certified-operators-ds4sj\" (UID: \"3197bb81-e64b-4ece-811a-0deb7c9cdaeb\") " pod="openshift-marketplace/certified-operators-ds4sj" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.410843 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsnb8\" (UniqueName: \"kubernetes.io/projected/3197bb81-e64b-4ece-811a-0deb7c9cdaeb-kube-api-access-nsnb8\") pod \"certified-operators-ds4sj\" (UID: \"3197bb81-e64b-4ece-811a-0deb7c9cdaeb\") " pod="openshift-marketplace/certified-operators-ds4sj" Nov 21 13:41:27 crc kubenswrapper[4721]: 
I1121 13:41:27.449368 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ds4sj" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.490876 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/273316c7-1207-4f09-94c4-ed63761df558-catalog-content\") pod \"community-operators-phxzd\" (UID: \"273316c7-1207-4f09-94c4-ed63761df558\") " pod="openshift-marketplace/community-operators-phxzd" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.491002 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/273316c7-1207-4f09-94c4-ed63761df558-utilities\") pod \"community-operators-phxzd\" (UID: \"273316c7-1207-4f09-94c4-ed63761df558\") " pod="openshift-marketplace/community-operators-phxzd" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.491079 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65c7n\" (UniqueName: \"kubernetes.io/projected/273316c7-1207-4f09-94c4-ed63761df558-kube-api-access-65c7n\") pod \"community-operators-phxzd\" (UID: \"273316c7-1207-4f09-94c4-ed63761df558\") " pod="openshift-marketplace/community-operators-phxzd" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.491537 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/273316c7-1207-4f09-94c4-ed63761df558-utilities\") pod \"community-operators-phxzd\" (UID: \"273316c7-1207-4f09-94c4-ed63761df558\") " pod="openshift-marketplace/community-operators-phxzd" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.491713 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/273316c7-1207-4f09-94c4-ed63761df558-catalog-content\") pod \"community-operators-phxzd\" (UID: \"273316c7-1207-4f09-94c4-ed63761df558\") " pod="openshift-marketplace/community-operators-phxzd" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.508336 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65c7n\" (UniqueName: \"kubernetes.io/projected/273316c7-1207-4f09-94c4-ed63761df558-kube-api-access-65c7n\") pod \"community-operators-phxzd\" (UID: \"273316c7-1207-4f09-94c4-ed63761df558\") " pod="openshift-marketplace/community-operators-phxzd" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.665312 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ds4sj"] Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.677309 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-phxzd" Nov 21 13:41:27 crc kubenswrapper[4721]: W1121 13:41:27.691248 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3197bb81_e64b_4ece_811a_0deb7c9cdaeb.slice/crio-d6b2c26f44d740567e61b451df2e1a11bb825f413e7c122f12bac78e072453df WatchSource:0}: Error finding container d6b2c26f44d740567e61b451df2e1a11bb825f413e7c122f12bac78e072453df: Status 404 returned error can't find the container with id d6b2c26f44d740567e61b451df2e1a11bb825f413e7c122f12bac78e072453df Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.792666 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ds4sj" event={"ID":"3197bb81-e64b-4ece-811a-0deb7c9cdaeb","Type":"ContainerStarted","Data":"d6b2c26f44d740567e61b451df2e1a11bb825f413e7c122f12bac78e072453df"} Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.796953 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5fg2s" event={"ID":"5acaaf53-2bc9-410f-8e00-66627b51e780","Type":"ContainerStarted","Data":"32358c49833b28f076f7d0c9ba8f66d2903ca1f64ee577ecfc912cd163788f85"} Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.808774 4721 generic.go:334] "Generic (PLEG): container finished" podID="4617f0e6-c704-425f-836b-d0a713769a2a" containerID="0905a9c4fcad7be80b39bc941666303ed6473713a336c71c7e00e3ebc449e105" exitCode=0 Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.808833 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-htjrp" event={"ID":"4617f0e6-c704-425f-836b-d0a713769a2a","Type":"ContainerDied","Data":"0905a9c4fcad7be80b39bc941666303ed6473713a336c71c7e00e3ebc449e105"} Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.837467 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5fg2s" podStartSLOduration=2.34519012 podStartE2EDuration="3.837443923s" podCreationTimestamp="2025-11-21 13:41:24 +0000 UTC" firstStartedPulling="2025-11-21 13:41:25.767372208 +0000 UTC m=+323.745263466" lastFinishedPulling="2025-11-21 13:41:27.259626021 +0000 UTC m=+325.237517269" observedRunningTime="2025-11-21 13:41:27.828345433 +0000 UTC m=+325.806236691" watchObservedRunningTime="2025-11-21 13:41:27.837443923 +0000 UTC m=+325.815335181" Nov 21 13:41:27 crc kubenswrapper[4721]: I1121 13:41:27.874745 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-phxzd"] Nov 21 13:41:27 crc kubenswrapper[4721]: W1121 13:41:27.928682 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod273316c7_1207_4f09_94c4_ed63761df558.slice/crio-005f386e76a898bc751f53ea5aa72a05d1532dc084ccc88ddc6b06463de17c81 WatchSource:0}: Error finding container 005f386e76a898bc751f53ea5aa72a05d1532dc084ccc88ddc6b06463de17c81: Status 404 returned error can't find the container with id 005f386e76a898bc751f53ea5aa72a05d1532dc084ccc88ddc6b06463de17c81 Nov 21 13:41:28 crc kubenswrapper[4721]: I1121 13:41:28.817533 4721 generic.go:334] "Generic (PLEG): container finished" podID="273316c7-1207-4f09-94c4-ed63761df558" containerID="d27aca3cd7913c32f342e12af978c793a2f760d51e520607aebcc536ff9adf17" exitCode=0 Nov 21 13:41:28 crc kubenswrapper[4721]: I1121 13:41:28.817642 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-phxzd" event={"ID":"273316c7-1207-4f09-94c4-ed63761df558","Type":"ContainerDied","Data":"d27aca3cd7913c32f342e12af978c793a2f760d51e520607aebcc536ff9adf17"} Nov 21 13:41:28 crc kubenswrapper[4721]: I1121 13:41:28.818088 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-phxzd" event={"ID":"273316c7-1207-4f09-94c4-ed63761df558","Type":"ContainerStarted","Data":"005f386e76a898bc751f53ea5aa72a05d1532dc084ccc88ddc6b06463de17c81"} Nov 21 13:41:28 crc kubenswrapper[4721]: I1121 13:41:28.825026 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-htjrp" event={"ID":"4617f0e6-c704-425f-836b-d0a713769a2a","Type":"ContainerStarted","Data":"d9073b800fdcaa9850cf6748ca7c19bc5cde4798f604e9ffdbe248a5e9e4bf39"} Nov 21 13:41:28 crc kubenswrapper[4721]: I1121 13:41:28.827569 4721 generic.go:334] "Generic (PLEG): container finished" podID="3197bb81-e64b-4ece-811a-0deb7c9cdaeb" containerID="2cd88a11fe82223b6154d7b79cf18e40c1ecb51f12aa87a24af033681e0eb819" exitCode=0 Nov 21 13:41:28 crc kubenswrapper[4721]: I1121 13:41:28.828275 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ds4sj" event={"ID":"3197bb81-e64b-4ece-811a-0deb7c9cdaeb","Type":"ContainerDied","Data":"2cd88a11fe82223b6154d7b79cf18e40c1ecb51f12aa87a24af033681e0eb819"} Nov 21 13:41:28 crc kubenswrapper[4721]: I1121 13:41:28.914974 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-htjrp" podStartSLOduration=2.390083389 podStartE2EDuration="4.914946916s" podCreationTimestamp="2025-11-21 13:41:24 +0000 UTC" firstStartedPulling="2025-11-21 13:41:25.776839995 +0000 UTC m=+323.754731253" lastFinishedPulling="2025-11-21 13:41:28.301703522 +0000 UTC m=+326.279594780" observedRunningTime="2025-11-21 13:41:28.882005468 +0000 UTC m=+326.859896726" watchObservedRunningTime="2025-11-21 13:41:28.914946916 +0000 UTC m=+326.892838174" Nov 21 13:41:29 crc kubenswrapper[4721]: I1121 13:41:29.835546 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-phxzd" event={"ID":"273316c7-1207-4f09-94c4-ed63761df558","Type":"ContainerStarted","Data":"edfabd9882984fcbda6caced9c278d03ac60bfd87f4098bcf3dc6f263e738c62"} Nov 21 13:41:29 crc kubenswrapper[4721]: I1121 13:41:29.845626 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ds4sj" event={"ID":"3197bb81-e64b-4ece-811a-0deb7c9cdaeb","Type":"ContainerStarted","Data":"24c4864ff087b519ce7f5965de36d69f7d80141d3770527fc87ec3040a003e93"} Nov 21 13:41:30 crc kubenswrapper[4721]: I1121 13:41:30.852496 4721 generic.go:334] "Generic (PLEG): container finished" podID="273316c7-1207-4f09-94c4-ed63761df558" containerID="edfabd9882984fcbda6caced9c278d03ac60bfd87f4098bcf3dc6f263e738c62" exitCode=0 Nov 21 13:41:30 crc kubenswrapper[4721]: I1121 13:41:30.852604 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-phxzd" event={"ID":"273316c7-1207-4f09-94c4-ed63761df558","Type":"ContainerDied","Data":"edfabd9882984fcbda6caced9c278d03ac60bfd87f4098bcf3dc6f263e738c62"} Nov 21 13:41:30 crc kubenswrapper[4721]: I1121 13:41:30.855622 4721 generic.go:334] "Generic (PLEG): container finished" podID="3197bb81-e64b-4ece-811a-0deb7c9cdaeb" containerID="24c4864ff087b519ce7f5965de36d69f7d80141d3770527fc87ec3040a003e93" exitCode=0 Nov 21 13:41:30 crc 
kubenswrapper[4721]: I1121 13:41:30.855665 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ds4sj" event={"ID":"3197bb81-e64b-4ece-811a-0deb7c9cdaeb","Type":"ContainerDied","Data":"24c4864ff087b519ce7f5965de36d69f7d80141d3770527fc87ec3040a003e93"} Nov 21 13:41:32 crc kubenswrapper[4721]: I1121 13:41:32.872431 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-phxzd" event={"ID":"273316c7-1207-4f09-94c4-ed63761df558","Type":"ContainerStarted","Data":"a4acc990d7b4c4d5644c46724b4ed03a2160c6d0757b9f2fa0bbd6338dddebae"} Nov 21 13:41:32 crc kubenswrapper[4721]: I1121 13:41:32.876439 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ds4sj" event={"ID":"3197bb81-e64b-4ece-811a-0deb7c9cdaeb","Type":"ContainerStarted","Data":"c4e89e1600e1a15499ac7c40d9bbc15cd5072f3ff6bb8fa222a460eff915f836"} Nov 21 13:41:33 crc kubenswrapper[4721]: I1121 13:41:33.912567 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ds4sj" podStartSLOduration=4.309956418 podStartE2EDuration="6.912553829s" podCreationTimestamp="2025-11-21 13:41:27 +0000 UTC" firstStartedPulling="2025-11-21 13:41:28.832969327 +0000 UTC m=+326.810860585" lastFinishedPulling="2025-11-21 13:41:31.435566738 +0000 UTC m=+329.413457996" observedRunningTime="2025-11-21 13:41:33.911409839 +0000 UTC m=+331.889301097" watchObservedRunningTime="2025-11-21 13:41:33.912553829 +0000 UTC m=+331.890445087" Nov 21 13:41:33 crc kubenswrapper[4721]: I1121 13:41:33.931488 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-phxzd" podStartSLOduration=4.469044101 podStartE2EDuration="6.931464897s" podCreationTimestamp="2025-11-21 13:41:27 +0000 UTC" firstStartedPulling="2025-11-21 13:41:28.819404113 +0000 UTC m=+326.797295411" lastFinishedPulling="2025-11-21 13:41:31.281824929 +0000 UTC m=+329.259716207" observedRunningTime="2025-11-21 13:41:33.929007132 +0000 UTC m=+331.906898390" watchObservedRunningTime="2025-11-21 13:41:33.931464897 +0000 UTC m=+331.909356155" Nov 21 13:41:35 crc kubenswrapper[4721]: I1121 13:41:35.065005 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5fg2s" Nov 21 13:41:35 crc kubenswrapper[4721]: I1121 13:41:35.065357 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5fg2s" Nov 21 13:41:35 crc kubenswrapper[4721]: I1121 13:41:35.112671 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5fg2s" Nov 21 13:41:35 crc kubenswrapper[4721]: I1121 13:41:35.209114 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-htjrp" Nov 21 13:41:35 crc kubenswrapper[4721]: I1121 13:41:35.209196 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-htjrp" Nov 21 13:41:35 crc kubenswrapper[4721]: I1121 13:41:35.280234 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-htjrp" Nov 21 13:41:35 crc kubenswrapper[4721]: I1121 13:41:35.947149 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-htjrp" Nov 21 13:41:35 crc 
kubenswrapper[4721]: I1121 13:41:35.950702 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5fg2s" Nov 21 13:41:37 crc kubenswrapper[4721]: I1121 13:41:37.450022 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ds4sj" Nov 21 13:41:37 crc kubenswrapper[4721]: I1121 13:41:37.450147 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ds4sj" Nov 21 13:41:37 crc kubenswrapper[4721]: I1121 13:41:37.505631 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ds4sj" Nov 21 13:41:37 crc kubenswrapper[4721]: I1121 13:41:37.678696 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-phxzd" Nov 21 13:41:37 crc kubenswrapper[4721]: I1121 13:41:37.678767 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-phxzd" Nov 21 13:41:37 crc kubenswrapper[4721]: I1121 13:41:37.719290 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-phxzd" Nov 21 13:41:37 crc kubenswrapper[4721]: I1121 13:41:37.959232 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ds4sj" Nov 21 13:41:37 crc kubenswrapper[4721]: I1121 13:41:37.972666 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-phxzd" Nov 21 13:42:35 crc kubenswrapper[4721]: I1121 13:42:35.262490 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 13:42:35 crc kubenswrapper[4721]: I1121 13:42:35.262997 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 13:43:05 crc kubenswrapper[4721]: I1121 13:43:05.263045 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 13:43:05 crc kubenswrapper[4721]: I1121 13:43:05.263836 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 13:43:12 crc kubenswrapper[4721]: I1121 13:43:12.925850 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-4l4dc"] Nov 21 13:43:12 crc kubenswrapper[4721]: I1121 13:43:12.927219 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:12 crc kubenswrapper[4721]: I1121 13:43:12.949298 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-4l4dc"] Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.018601 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.018714 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/583ac5ae-6d31-49d6-ae59-3dc066455b4f-bound-sa-token\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.018749 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/583ac5ae-6d31-49d6-ae59-3dc066455b4f-registry-certificates\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.018775 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/583ac5ae-6d31-49d6-ae59-3dc066455b4f-trusted-ca\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.018832 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/583ac5ae-6d31-49d6-ae59-3dc066455b4f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.018884 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/583ac5ae-6d31-49d6-ae59-3dc066455b4f-registry-tls\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.018912 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/583ac5ae-6d31-49d6-ae59-3dc066455b4f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.018940 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmf76\" (UniqueName: 
\"kubernetes.io/projected/583ac5ae-6d31-49d6-ae59-3dc066455b4f-kube-api-access-kmf76\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.052755 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.119844 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/583ac5ae-6d31-49d6-ae59-3dc066455b4f-bound-sa-token\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.120163 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/583ac5ae-6d31-49d6-ae59-3dc066455b4f-registry-certificates\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.120301 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/583ac5ae-6d31-49d6-ae59-3dc066455b4f-trusted-ca\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.120441 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/583ac5ae-6d31-49d6-ae59-3dc066455b4f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.120554 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/583ac5ae-6d31-49d6-ae59-3dc066455b4f-registry-tls\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.120675 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/583ac5ae-6d31-49d6-ae59-3dc066455b4f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.120794 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmf76\" (UniqueName: \"kubernetes.io/projected/583ac5ae-6d31-49d6-ae59-3dc066455b4f-kube-api-access-kmf76\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.121016 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/583ac5ae-6d31-49d6-ae59-3dc066455b4f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.121751 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/583ac5ae-6d31-49d6-ae59-3dc066455b4f-trusted-ca\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.121833 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/583ac5ae-6d31-49d6-ae59-3dc066455b4f-registry-certificates\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.130663 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/583ac5ae-6d31-49d6-ae59-3dc066455b4f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.137146 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/583ac5ae-6d31-49d6-ae59-3dc066455b4f-registry-tls\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.139187 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/583ac5ae-6d31-49d6-ae59-3dc066455b4f-bound-sa-token\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.139880 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmf76\" (UniqueName: \"kubernetes.io/projected/583ac5ae-6d31-49d6-ae59-3dc066455b4f-kube-api-access-kmf76\") pod \"image-registry-66df7c8f76-4l4dc\" (UID: \"583ac5ae-6d31-49d6-ae59-3dc066455b4f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.245910 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.470514 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-4l4dc"] Nov 21 13:43:13 crc kubenswrapper[4721]: W1121 13:43:13.477314 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod583ac5ae_6d31_49d6_ae59_3dc066455b4f.slice/crio-56284e5fc9cd3d70fcb92fd534b7d1128df70542f7bb0b46387ec7fc06a3f677 WatchSource:0}: Error finding container 56284e5fc9cd3d70fcb92fd534b7d1128df70542f7bb0b46387ec7fc06a3f677: Status 404 returned error can't find the container with id 56284e5fc9cd3d70fcb92fd534b7d1128df70542f7bb0b46387ec7fc06a3f677 Nov 21 13:43:13 crc kubenswrapper[4721]: I1121 13:43:13.558016 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" event={"ID":"583ac5ae-6d31-49d6-ae59-3dc066455b4f","Type":"ContainerStarted","Data":"56284e5fc9cd3d70fcb92fd534b7d1128df70542f7bb0b46387ec7fc06a3f677"} Nov 21 13:43:14 crc kubenswrapper[4721]: I1121 13:43:14.565177 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" event={"ID":"583ac5ae-6d31-49d6-ae59-3dc066455b4f","Type":"ContainerStarted","Data":"497f91d213644b8817b3ad89ec17f1291b442a7c622f572c99c13a2fa872b905"} Nov 21 13:43:14 crc kubenswrapper[4721]: I1121 13:43:14.565545 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:14 crc kubenswrapper[4721]: I1121 13:43:14.589913 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" podStartSLOduration=2.589886249 podStartE2EDuration="2.589886249s" podCreationTimestamp="2025-11-21 13:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:43:14.588882402 +0000 UTC m=+432.566773740" watchObservedRunningTime="2025-11-21 13:43:14.589886249 +0000 UTC m=+432.567777527" Nov 21 13:43:33 crc kubenswrapper[4721]: I1121 13:43:33.257319 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-4l4dc" Nov 21 13:43:33 crc kubenswrapper[4721]: I1121 13:43:33.347815 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-crzdf"] Nov 21 13:43:35 crc kubenswrapper[4721]: I1121 13:43:35.262360 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 13:43:35 crc kubenswrapper[4721]: I1121 13:43:35.262935 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 13:43:35 crc kubenswrapper[4721]: I1121 13:43:35.262984 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-mstbk" Nov 21 13:43:35 crc kubenswrapper[4721]: I1121 13:43:35.263515 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cc646a5815972aaf7a75fc681835a3564be98e7ddc4ac19072eaae8996d79323"} pod="openshift-machine-config-operator/machine-config-daemon-mstbk" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 13:43:35 crc kubenswrapper[4721]: I1121 13:43:35.263578 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" containerID="cri-o://cc646a5815972aaf7a75fc681835a3564be98e7ddc4ac19072eaae8996d79323" gracePeriod=600 Nov 21 13:43:35 crc kubenswrapper[4721]: I1121 13:43:35.700328 4721 generic.go:334] "Generic (PLEG): container finished" podID="fe3943c5-1349-4275-bc45-25db418c7488" containerID="cc646a5815972aaf7a75fc681835a3564be98e7ddc4ac19072eaae8996d79323" exitCode=0 Nov 21 13:43:35 crc kubenswrapper[4721]: I1121 13:43:35.700436 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" event={"ID":"fe3943c5-1349-4275-bc45-25db418c7488","Type":"ContainerDied","Data":"cc646a5815972aaf7a75fc681835a3564be98e7ddc4ac19072eaae8996d79323"} Nov 21 13:43:35 crc kubenswrapper[4721]: I1121 13:43:35.700700 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" event={"ID":"fe3943c5-1349-4275-bc45-25db418c7488","Type":"ContainerStarted","Data":"2e0824dfbe9576b06ae1544acece2d07845bf0e4ae9bdf32618bab9f29b83047"} Nov 21 13:43:35 crc kubenswrapper[4721]: I1121 13:43:35.700734 4721 scope.go:117] "RemoveContainer" containerID="ff440ed26be2ef376bb29620ff14669ebe3582fc29048331bd9cb5c60f8bf0d6" Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.410462 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" podUID="b6ae0efb-0b79-4944-9dbb-44da8a370450" containerName="registry" containerID="cri-o://bee9ee06fedd88f3846e0e945039f0ab9f4809f204bb87c8dbc1e9d18ad2b5ee" gracePeriod=30 Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.747557 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.852913 4721 generic.go:334] "Generic (PLEG): container finished" podID="b6ae0efb-0b79-4944-9dbb-44da8a370450" containerID="bee9ee06fedd88f3846e0e945039f0ab9f4809f204bb87c8dbc1e9d18ad2b5ee" exitCode=0 Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.852954 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" event={"ID":"b6ae0efb-0b79-4944-9dbb-44da8a370450","Type":"ContainerDied","Data":"bee9ee06fedd88f3846e0e945039f0ab9f4809f204bb87c8dbc1e9d18ad2b5ee"} Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.852978 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" event={"ID":"b6ae0efb-0b79-4944-9dbb-44da8a370450","Type":"ContainerDied","Data":"670e2f679d865a9de5c41d30e42978d0fb9cb64ac31db2343489d3f18ae96308"} Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.852995 4721 scope.go:117] "RemoveContainer" containerID="bee9ee06fedd88f3846e0e945039f0ab9f4809f204bb87c8dbc1e9d18ad2b5ee" Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.853112 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-crzdf" Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.878769 4721 scope.go:117] "RemoveContainer" containerID="bee9ee06fedd88f3846e0e945039f0ab9f4809f204bb87c8dbc1e9d18ad2b5ee" Nov 21 13:43:58 crc kubenswrapper[4721]: E1121 13:43:58.879435 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bee9ee06fedd88f3846e0e945039f0ab9f4809f204bb87c8dbc1e9d18ad2b5ee\": container with ID starting with bee9ee06fedd88f3846e0e945039f0ab9f4809f204bb87c8dbc1e9d18ad2b5ee not found: ID does not exist" containerID="bee9ee06fedd88f3846e0e945039f0ab9f4809f204bb87c8dbc1e9d18ad2b5ee" Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.879470 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bee9ee06fedd88f3846e0e945039f0ab9f4809f204bb87c8dbc1e9d18ad2b5ee"} err="failed to get container status \"bee9ee06fedd88f3846e0e945039f0ab9f4809f204bb87c8dbc1e9d18ad2b5ee\": rpc error: code = NotFound desc = could not find container \"bee9ee06fedd88f3846e0e945039f0ab9f4809f204bb87c8dbc1e9d18ad2b5ee\": container with ID starting with bee9ee06fedd88f3846e0e945039f0ab9f4809f204bb87c8dbc1e9d18ad2b5ee not found: ID does not exist" Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.939998 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6ae0efb-0b79-4944-9dbb-44da8a370450-trusted-ca\") pod \"b6ae0efb-0b79-4944-9dbb-44da8a370450\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.940157 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b6ae0efb-0b79-4944-9dbb-44da8a370450-bound-sa-token\") pod \"b6ae0efb-0b79-4944-9dbb-44da8a370450\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.940196 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: 
\"kubernetes.io/empty-dir/b6ae0efb-0b79-4944-9dbb-44da8a370450-ca-trust-extracted\") pod \"b6ae0efb-0b79-4944-9dbb-44da8a370450\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.940231 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b6ae0efb-0b79-4944-9dbb-44da8a370450-installation-pull-secrets\") pod \"b6ae0efb-0b79-4944-9dbb-44da8a370450\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.940266 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b6ae0efb-0b79-4944-9dbb-44da8a370450-registry-certificates\") pod \"b6ae0efb-0b79-4944-9dbb-44da8a370450\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.941216 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"b6ae0efb-0b79-4944-9dbb-44da8a370450\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.941417 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6ae0efb-0b79-4944-9dbb-44da8a370450-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "b6ae0efb-0b79-4944-9dbb-44da8a370450" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.941521 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6ae0efb-0b79-4944-9dbb-44da8a370450-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "b6ae0efb-0b79-4944-9dbb-44da8a370450" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.941612 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52krc\" (UniqueName: \"kubernetes.io/projected/b6ae0efb-0b79-4944-9dbb-44da8a370450-kube-api-access-52krc\") pod \"b6ae0efb-0b79-4944-9dbb-44da8a370450\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.941684 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b6ae0efb-0b79-4944-9dbb-44da8a370450-registry-tls\") pod \"b6ae0efb-0b79-4944-9dbb-44da8a370450\" (UID: \"b6ae0efb-0b79-4944-9dbb-44da8a370450\") " Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.942245 4721 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6ae0efb-0b79-4944-9dbb-44da8a370450-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.942261 4721 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b6ae0efb-0b79-4944-9dbb-44da8a370450-registry-certificates\") on node \"crc\" DevicePath \"\"" Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.949203 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6ae0efb-0b79-4944-9dbb-44da8a370450-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "b6ae0efb-0b79-4944-9dbb-44da8a370450" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.949681 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6ae0efb-0b79-4944-9dbb-44da8a370450-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "b6ae0efb-0b79-4944-9dbb-44da8a370450" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.950114 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6ae0efb-0b79-4944-9dbb-44da8a370450-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "b6ae0efb-0b79-4944-9dbb-44da8a370450" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.950684 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6ae0efb-0b79-4944-9dbb-44da8a370450-kube-api-access-52krc" (OuterVolumeSpecName: "kube-api-access-52krc") pod "b6ae0efb-0b79-4944-9dbb-44da8a370450" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450"). InnerVolumeSpecName "kube-api-access-52krc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.960346 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "b6ae0efb-0b79-4944-9dbb-44da8a370450" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450"). 
InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 21 13:43:58 crc kubenswrapper[4721]: I1121 13:43:58.964500 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6ae0efb-0b79-4944-9dbb-44da8a370450-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "b6ae0efb-0b79-4944-9dbb-44da8a370450" (UID: "b6ae0efb-0b79-4944-9dbb-44da8a370450"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:43:59 crc kubenswrapper[4721]: I1121 13:43:59.043041 4721 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b6ae0efb-0b79-4944-9dbb-44da8a370450-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 21 13:43:59 crc kubenswrapper[4721]: I1121 13:43:59.043103 4721 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b6ae0efb-0b79-4944-9dbb-44da8a370450-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Nov 21 13:43:59 crc kubenswrapper[4721]: I1121 13:43:59.043120 4721 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b6ae0efb-0b79-4944-9dbb-44da8a370450-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Nov 21 13:43:59 crc kubenswrapper[4721]: I1121 13:43:59.043136 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52krc\" (UniqueName: \"kubernetes.io/projected/b6ae0efb-0b79-4944-9dbb-44da8a370450-kube-api-access-52krc\") on node \"crc\" DevicePath \"\"" Nov 21 13:43:59 crc kubenswrapper[4721]: I1121 13:43:59.043150 4721 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b6ae0efb-0b79-4944-9dbb-44da8a370450-registry-tls\") on node \"crc\" DevicePath \"\"" Nov 21 13:43:59 crc kubenswrapper[4721]: E1121 13:43:59.057793 4721 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb6ae0efb_0b79_4944_9dbb_44da8a370450.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb6ae0efb_0b79_4944_9dbb_44da8a370450.slice/crio-670e2f679d865a9de5c41d30e42978d0fb9cb64ac31db2343489d3f18ae96308\": RecentStats: unable to find data in memory cache]" Nov 21 13:43:59 crc kubenswrapper[4721]: I1121 13:43:59.189322 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-crzdf"] Nov 21 13:43:59 crc kubenswrapper[4721]: I1121 13:43:59.192592 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-crzdf"] Nov 21 13:44:00 crc kubenswrapper[4721]: I1121 13:44:00.989611 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6ae0efb-0b79-4944-9dbb-44da8a370450" path="/var/lib/kubelet/pods/b6ae0efb-0b79-4944-9dbb-44da8a370450/volumes" Nov 21 13:45:00 crc kubenswrapper[4721]: I1121 13:45:00.153441 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395545-jg4pn"] Nov 21 13:45:00 crc kubenswrapper[4721]: E1121 13:45:00.154459 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6ae0efb-0b79-4944-9dbb-44da8a370450" containerName="registry" Nov 21 13:45:00 crc 
kubenswrapper[4721]: I1121 13:45:00.154474 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6ae0efb-0b79-4944-9dbb-44da8a370450" containerName="registry" Nov 21 13:45:00 crc kubenswrapper[4721]: I1121 13:45:00.154666 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6ae0efb-0b79-4944-9dbb-44da8a370450" containerName="registry" Nov 21 13:45:00 crc kubenswrapper[4721]: I1121 13:45:00.155225 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395545-jg4pn" Nov 21 13:45:00 crc kubenswrapper[4721]: I1121 13:45:00.159025 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 21 13:45:00 crc kubenswrapper[4721]: I1121 13:45:00.160031 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 21 13:45:00 crc kubenswrapper[4721]: I1121 13:45:00.168376 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395545-jg4pn"] Nov 21 13:45:00 crc kubenswrapper[4721]: I1121 13:45:00.290747 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pllkj\" (UniqueName: \"kubernetes.io/projected/173c0bff-958a-4565-a578-9d62cef76971-kube-api-access-pllkj\") pod \"collect-profiles-29395545-jg4pn\" (UID: \"173c0bff-958a-4565-a578-9d62cef76971\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395545-jg4pn" Nov 21 13:45:00 crc kubenswrapper[4721]: I1121 13:45:00.292276 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/173c0bff-958a-4565-a578-9d62cef76971-secret-volume\") pod \"collect-profiles-29395545-jg4pn\" (UID: \"173c0bff-958a-4565-a578-9d62cef76971\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395545-jg4pn" Nov 21 13:45:00 crc kubenswrapper[4721]: I1121 13:45:00.292495 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/173c0bff-958a-4565-a578-9d62cef76971-config-volume\") pod \"collect-profiles-29395545-jg4pn\" (UID: \"173c0bff-958a-4565-a578-9d62cef76971\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395545-jg4pn" Nov 21 13:45:00 crc kubenswrapper[4721]: I1121 13:45:00.395090 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/173c0bff-958a-4565-a578-9d62cef76971-secret-volume\") pod \"collect-profiles-29395545-jg4pn\" (UID: \"173c0bff-958a-4565-a578-9d62cef76971\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395545-jg4pn" Nov 21 13:45:00 crc kubenswrapper[4721]: I1121 13:45:00.395165 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/173c0bff-958a-4565-a578-9d62cef76971-config-volume\") pod \"collect-profiles-29395545-jg4pn\" (UID: \"173c0bff-958a-4565-a578-9d62cef76971\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395545-jg4pn" Nov 21 13:45:00 crc kubenswrapper[4721]: I1121 13:45:00.395203 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pllkj\" (UniqueName: 
\"kubernetes.io/projected/173c0bff-958a-4565-a578-9d62cef76971-kube-api-access-pllkj\") pod \"collect-profiles-29395545-jg4pn\" (UID: \"173c0bff-958a-4565-a578-9d62cef76971\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395545-jg4pn" Nov 21 13:45:00 crc kubenswrapper[4721]: I1121 13:45:00.396628 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/173c0bff-958a-4565-a578-9d62cef76971-config-volume\") pod \"collect-profiles-29395545-jg4pn\" (UID: \"173c0bff-958a-4565-a578-9d62cef76971\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395545-jg4pn" Nov 21 13:45:00 crc kubenswrapper[4721]: I1121 13:45:00.401478 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/173c0bff-958a-4565-a578-9d62cef76971-secret-volume\") pod \"collect-profiles-29395545-jg4pn\" (UID: \"173c0bff-958a-4565-a578-9d62cef76971\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395545-jg4pn" Nov 21 13:45:00 crc kubenswrapper[4721]: I1121 13:45:00.415519 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pllkj\" (UniqueName: \"kubernetes.io/projected/173c0bff-958a-4565-a578-9d62cef76971-kube-api-access-pllkj\") pod \"collect-profiles-29395545-jg4pn\" (UID: \"173c0bff-958a-4565-a578-9d62cef76971\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395545-jg4pn" Nov 21 13:45:00 crc kubenswrapper[4721]: I1121 13:45:00.475570 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395545-jg4pn" Nov 21 13:45:00 crc kubenswrapper[4721]: I1121 13:45:00.701709 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395545-jg4pn"] Nov 21 13:45:01 crc kubenswrapper[4721]: I1121 13:45:01.251362 4721 generic.go:334] "Generic (PLEG): container finished" podID="173c0bff-958a-4565-a578-9d62cef76971" containerID="3529d75eec33ef4ce5350238987202c40e52a593a75361ab76942ba95e359463" exitCode=0 Nov 21 13:45:01 crc kubenswrapper[4721]: I1121 13:45:01.251470 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395545-jg4pn" event={"ID":"173c0bff-958a-4565-a578-9d62cef76971","Type":"ContainerDied","Data":"3529d75eec33ef4ce5350238987202c40e52a593a75361ab76942ba95e359463"} Nov 21 13:45:01 crc kubenswrapper[4721]: I1121 13:45:01.251801 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395545-jg4pn" event={"ID":"173c0bff-958a-4565-a578-9d62cef76971","Type":"ContainerStarted","Data":"df45a8eb5d7c9cac90cf0a6755193749baf336f99cb8cc40e25b8d26c0f5edcd"} Nov 21 13:45:02 crc kubenswrapper[4721]: I1121 13:45:02.489637 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395545-jg4pn" Nov 21 13:45:02 crc kubenswrapper[4721]: I1121 13:45:02.620088 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pllkj\" (UniqueName: \"kubernetes.io/projected/173c0bff-958a-4565-a578-9d62cef76971-kube-api-access-pllkj\") pod \"173c0bff-958a-4565-a578-9d62cef76971\" (UID: \"173c0bff-958a-4565-a578-9d62cef76971\") " Nov 21 13:45:02 crc kubenswrapper[4721]: I1121 13:45:02.620176 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/173c0bff-958a-4565-a578-9d62cef76971-secret-volume\") pod \"173c0bff-958a-4565-a578-9d62cef76971\" (UID: \"173c0bff-958a-4565-a578-9d62cef76971\") " Nov 21 13:45:02 crc kubenswrapper[4721]: I1121 13:45:02.620206 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/173c0bff-958a-4565-a578-9d62cef76971-config-volume\") pod \"173c0bff-958a-4565-a578-9d62cef76971\" (UID: \"173c0bff-958a-4565-a578-9d62cef76971\") " Nov 21 13:45:02 crc kubenswrapper[4721]: I1121 13:45:02.620934 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/173c0bff-958a-4565-a578-9d62cef76971-config-volume" (OuterVolumeSpecName: "config-volume") pod "173c0bff-958a-4565-a578-9d62cef76971" (UID: "173c0bff-958a-4565-a578-9d62cef76971"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:45:02 crc kubenswrapper[4721]: I1121 13:45:02.626174 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/173c0bff-958a-4565-a578-9d62cef76971-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "173c0bff-958a-4565-a578-9d62cef76971" (UID: "173c0bff-958a-4565-a578-9d62cef76971"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:45:02 crc kubenswrapper[4721]: I1121 13:45:02.626807 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/173c0bff-958a-4565-a578-9d62cef76971-kube-api-access-pllkj" (OuterVolumeSpecName: "kube-api-access-pllkj") pod "173c0bff-958a-4565-a578-9d62cef76971" (UID: "173c0bff-958a-4565-a578-9d62cef76971"). InnerVolumeSpecName "kube-api-access-pllkj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:45:02 crc kubenswrapper[4721]: I1121 13:45:02.721718 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pllkj\" (UniqueName: \"kubernetes.io/projected/173c0bff-958a-4565-a578-9d62cef76971-kube-api-access-pllkj\") on node \"crc\" DevicePath \"\"" Nov 21 13:45:02 crc kubenswrapper[4721]: I1121 13:45:02.721760 4721 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/173c0bff-958a-4565-a578-9d62cef76971-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 21 13:45:02 crc kubenswrapper[4721]: I1121 13:45:02.721770 4721 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/173c0bff-958a-4565-a578-9d62cef76971-config-volume\") on node \"crc\" DevicePath \"\"" Nov 21 13:45:03 crc kubenswrapper[4721]: I1121 13:45:03.193092 4721 scope.go:117] "RemoveContainer" containerID="44b41c4d5171d98e8966337fae6844f89345319db26dbd5c9fc04b23352de921" Nov 21 13:45:03 crc kubenswrapper[4721]: I1121 13:45:03.264490 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395545-jg4pn" event={"ID":"173c0bff-958a-4565-a578-9d62cef76971","Type":"ContainerDied","Data":"df45a8eb5d7c9cac90cf0a6755193749baf336f99cb8cc40e25b8d26c0f5edcd"} Nov 21 13:45:03 crc kubenswrapper[4721]: I1121 13:45:03.264544 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df45a8eb5d7c9cac90cf0a6755193749baf336f99cb8cc40e25b8d26c0f5edcd" Nov 21 13:45:03 crc kubenswrapper[4721]: I1121 13:45:03.264549 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395545-jg4pn" Nov 21 13:45:35 crc kubenswrapper[4721]: I1121 13:45:35.262137 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 13:45:35 crc kubenswrapper[4721]: I1121 13:45:35.262752 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 13:46:05 crc kubenswrapper[4721]: I1121 13:46:05.262042 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 13:46:05 crc kubenswrapper[4721]: I1121 13:46:05.262563 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 13:46:35 crc kubenswrapper[4721]: I1121 13:46:35.261803 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe 
status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 13:46:35 crc kubenswrapper[4721]: I1121 13:46:35.263505 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 13:46:35 crc kubenswrapper[4721]: I1121 13:46:35.263640 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" Nov 21 13:46:35 crc kubenswrapper[4721]: I1121 13:46:35.264393 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2e0824dfbe9576b06ae1544acece2d07845bf0e4ae9bdf32618bab9f29b83047"} pod="openshift-machine-config-operator/machine-config-daemon-mstbk" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 13:46:35 crc kubenswrapper[4721]: I1121 13:46:35.264575 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" containerID="cri-o://2e0824dfbe9576b06ae1544acece2d07845bf0e4ae9bdf32618bab9f29b83047" gracePeriod=600 Nov 21 13:46:35 crc kubenswrapper[4721]: I1121 13:46:35.919217 4721 generic.go:334] "Generic (PLEG): container finished" podID="fe3943c5-1349-4275-bc45-25db418c7488" containerID="2e0824dfbe9576b06ae1544acece2d07845bf0e4ae9bdf32618bab9f29b83047" exitCode=0 Nov 21 13:46:35 crc kubenswrapper[4721]: I1121 13:46:35.919305 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" event={"ID":"fe3943c5-1349-4275-bc45-25db418c7488","Type":"ContainerDied","Data":"2e0824dfbe9576b06ae1544acece2d07845bf0e4ae9bdf32618bab9f29b83047"} Nov 21 13:46:35 crc kubenswrapper[4721]: I1121 13:46:35.919589 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" event={"ID":"fe3943c5-1349-4275-bc45-25db418c7488","Type":"ContainerStarted","Data":"3c25faaf13321f5c86af730ca041b03706f375a923a1b990e773cef7516bc285"} Nov 21 13:46:35 crc kubenswrapper[4721]: I1121 13:46:35.919611 4721 scope.go:117] "RemoveContainer" containerID="cc646a5815972aaf7a75fc681835a3564be98e7ddc4ac19072eaae8996d79323" Nov 21 13:48:35 crc kubenswrapper[4721]: I1121 13:48:35.262270 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 13:48:35 crc kubenswrapper[4721]: I1121 13:48:35.263093 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 13:48:39 crc kubenswrapper[4721]: I1121 13:48:39.489837 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-controller-manager/controller-manager-879f6c89f-dmhsg"] Nov 21 13:48:39 crc kubenswrapper[4721]: I1121 13:48:39.490555 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" podUID="305d9e46-16ee-4444-a4af-c58ac4c014f2" containerName="controller-manager" containerID="cri-o://7503b9a62ab55bec42ae12a811b8b98a3f4bd794d3d86b3cd02ece5053c7d97c" gracePeriod=30 Nov 21 13:48:39 crc kubenswrapper[4721]: I1121 13:48:39.578186 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl"] Nov 21 13:48:39 crc kubenswrapper[4721]: I1121 13:48:39.578452 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" podUID="180dc212-b361-48e7-ad3e-70328538c4c6" containerName="route-controller-manager" containerID="cri-o://948d27f8f981de9fc53c500c5677e51c7f4bc44a17c6c4ac29b3f8e45e86f2bf" gracePeriod=30 Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.195752 4721 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-dmhsg container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.196311 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" podUID="305d9e46-16ee-4444-a4af-c58ac4c014f2" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.524756 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.532593 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.634945 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/305d9e46-16ee-4444-a4af-c58ac4c014f2-proxy-ca-bundles\") pod \"305d9e46-16ee-4444-a4af-c58ac4c014f2\" (UID: \"305d9e46-16ee-4444-a4af-c58ac4c014f2\") " Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.634998 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/305d9e46-16ee-4444-a4af-c58ac4c014f2-config\") pod \"305d9e46-16ee-4444-a4af-c58ac4c014f2\" (UID: \"305d9e46-16ee-4444-a4af-c58ac4c014f2\") " Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.635024 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/305d9e46-16ee-4444-a4af-c58ac4c014f2-client-ca\") pod \"305d9e46-16ee-4444-a4af-c58ac4c014f2\" (UID: \"305d9e46-16ee-4444-a4af-c58ac4c014f2\") " Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.635058 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/180dc212-b361-48e7-ad3e-70328538c4c6-client-ca\") pod \"180dc212-b361-48e7-ad3e-70328538c4c6\" (UID: \"180dc212-b361-48e7-ad3e-70328538c4c6\") " Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.635114 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/180dc212-b361-48e7-ad3e-70328538c4c6-serving-cert\") pod \"180dc212-b361-48e7-ad3e-70328538c4c6\" (UID: \"180dc212-b361-48e7-ad3e-70328538c4c6\") " Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.635147 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/180dc212-b361-48e7-ad3e-70328538c4c6-config\") pod \"180dc212-b361-48e7-ad3e-70328538c4c6\" (UID: \"180dc212-b361-48e7-ad3e-70328538c4c6\") " Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.635177 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/305d9e46-16ee-4444-a4af-c58ac4c014f2-serving-cert\") pod \"305d9e46-16ee-4444-a4af-c58ac4c014f2\" (UID: \"305d9e46-16ee-4444-a4af-c58ac4c014f2\") " Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.635193 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppjss\" (UniqueName: \"kubernetes.io/projected/180dc212-b361-48e7-ad3e-70328538c4c6-kube-api-access-ppjss\") pod \"180dc212-b361-48e7-ad3e-70328538c4c6\" (UID: \"180dc212-b361-48e7-ad3e-70328538c4c6\") " Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.635209 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9xpx\" (UniqueName: \"kubernetes.io/projected/305d9e46-16ee-4444-a4af-c58ac4c014f2-kube-api-access-t9xpx\") pod \"305d9e46-16ee-4444-a4af-c58ac4c014f2\" (UID: \"305d9e46-16ee-4444-a4af-c58ac4c014f2\") " Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.635941 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/305d9e46-16ee-4444-a4af-c58ac4c014f2-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod 
"305d9e46-16ee-4444-a4af-c58ac4c014f2" (UID: "305d9e46-16ee-4444-a4af-c58ac4c014f2"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.635950 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/180dc212-b361-48e7-ad3e-70328538c4c6-config" (OuterVolumeSpecName: "config") pod "180dc212-b361-48e7-ad3e-70328538c4c6" (UID: "180dc212-b361-48e7-ad3e-70328538c4c6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.635947 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/180dc212-b361-48e7-ad3e-70328538c4c6-client-ca" (OuterVolumeSpecName: "client-ca") pod "180dc212-b361-48e7-ad3e-70328538c4c6" (UID: "180dc212-b361-48e7-ad3e-70328538c4c6"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.636130 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/305d9e46-16ee-4444-a4af-c58ac4c014f2-config" (OuterVolumeSpecName: "config") pod "305d9e46-16ee-4444-a4af-c58ac4c014f2" (UID: "305d9e46-16ee-4444-a4af-c58ac4c014f2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.636384 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/305d9e46-16ee-4444-a4af-c58ac4c014f2-client-ca" (OuterVolumeSpecName: "client-ca") pod "305d9e46-16ee-4444-a4af-c58ac4c014f2" (UID: "305d9e46-16ee-4444-a4af-c58ac4c014f2"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.642189 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/180dc212-b361-48e7-ad3e-70328538c4c6-kube-api-access-ppjss" (OuterVolumeSpecName: "kube-api-access-ppjss") pod "180dc212-b361-48e7-ad3e-70328538c4c6" (UID: "180dc212-b361-48e7-ad3e-70328538c4c6"). InnerVolumeSpecName "kube-api-access-ppjss". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.645268 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/305d9e46-16ee-4444-a4af-c58ac4c014f2-kube-api-access-t9xpx" (OuterVolumeSpecName: "kube-api-access-t9xpx") pod "305d9e46-16ee-4444-a4af-c58ac4c014f2" (UID: "305d9e46-16ee-4444-a4af-c58ac4c014f2"). InnerVolumeSpecName "kube-api-access-t9xpx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.645323 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/305d9e46-16ee-4444-a4af-c58ac4c014f2-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "305d9e46-16ee-4444-a4af-c58ac4c014f2" (UID: "305d9e46-16ee-4444-a4af-c58ac4c014f2"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.648467 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/180dc212-b361-48e7-ad3e-70328538c4c6-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "180dc212-b361-48e7-ad3e-70328538c4c6" (UID: "180dc212-b361-48e7-ad3e-70328538c4c6"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.722518 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.722551 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" event={"ID":"305d9e46-16ee-4444-a4af-c58ac4c014f2","Type":"ContainerDied","Data":"7503b9a62ab55bec42ae12a811b8b98a3f4bd794d3d86b3cd02ece5053c7d97c"} Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.722460 4721 generic.go:334] "Generic (PLEG): container finished" podID="305d9e46-16ee-4444-a4af-c58ac4c014f2" containerID="7503b9a62ab55bec42ae12a811b8b98a3f4bd794d3d86b3cd02ece5053c7d97c" exitCode=0 Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.722711 4721 scope.go:117] "RemoveContainer" containerID="7503b9a62ab55bec42ae12a811b8b98a3f4bd794d3d86b3cd02ece5053c7d97c" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.722751 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dmhsg" event={"ID":"305d9e46-16ee-4444-a4af-c58ac4c014f2","Type":"ContainerDied","Data":"5b41361b30386b810a519e14ffcb90581184543121fb80fea80b5a6fb65e3b9b"} Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.724852 4721 generic.go:334] "Generic (PLEG): container finished" podID="180dc212-b361-48e7-ad3e-70328538c4c6" containerID="948d27f8f981de9fc53c500c5677e51c7f4bc44a17c6c4ac29b3f8e45e86f2bf" exitCode=0 Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.724896 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" event={"ID":"180dc212-b361-48e7-ad3e-70328538c4c6","Type":"ContainerDied","Data":"948d27f8f981de9fc53c500c5677e51c7f4bc44a17c6c4ac29b3f8e45e86f2bf"} Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.724911 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.724924 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl" event={"ID":"180dc212-b361-48e7-ad3e-70328538c4c6","Type":"ContainerDied","Data":"731151043c96423f0e1e1f9c61598998cfdb75ca3dce96bb70aaedc223c48364"} Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.736656 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/180dc212-b361-48e7-ad3e-70328538c4c6-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.736682 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/305d9e46-16ee-4444-a4af-c58ac4c014f2-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.736696 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9xpx\" (UniqueName: \"kubernetes.io/projected/305d9e46-16ee-4444-a4af-c58ac4c014f2-kube-api-access-t9xpx\") on node \"crc\" DevicePath \"\"" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.736708 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppjss\" (UniqueName: \"kubernetes.io/projected/180dc212-b361-48e7-ad3e-70328538c4c6-kube-api-access-ppjss\") on node \"crc\" DevicePath \"\"" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.736719 4721 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/305d9e46-16ee-4444-a4af-c58ac4c014f2-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.736730 4721 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/305d9e46-16ee-4444-a4af-c58ac4c014f2-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.736740 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/305d9e46-16ee-4444-a4af-c58ac4c014f2-client-ca\") on node \"crc\" DevicePath \"\"" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.736752 4721 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/180dc212-b361-48e7-ad3e-70328538c4c6-client-ca\") on node \"crc\" DevicePath \"\"" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.736763 4721 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/180dc212-b361-48e7-ad3e-70328538c4c6-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.775127 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dmhsg"] Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.782713 4721 scope.go:117] "RemoveContainer" containerID="7503b9a62ab55bec42ae12a811b8b98a3f4bd794d3d86b3cd02ece5053c7d97c" Nov 21 13:48:40 crc kubenswrapper[4721]: E1121 13:48:40.787578 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7503b9a62ab55bec42ae12a811b8b98a3f4bd794d3d86b3cd02ece5053c7d97c\": container with ID starting with 
7503b9a62ab55bec42ae12a811b8b98a3f4bd794d3d86b3cd02ece5053c7d97c not found: ID does not exist" containerID="7503b9a62ab55bec42ae12a811b8b98a3f4bd794d3d86b3cd02ece5053c7d97c" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.787646 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7503b9a62ab55bec42ae12a811b8b98a3f4bd794d3d86b3cd02ece5053c7d97c"} err="failed to get container status \"7503b9a62ab55bec42ae12a811b8b98a3f4bd794d3d86b3cd02ece5053c7d97c\": rpc error: code = NotFound desc = could not find container \"7503b9a62ab55bec42ae12a811b8b98a3f4bd794d3d86b3cd02ece5053c7d97c\": container with ID starting with 7503b9a62ab55bec42ae12a811b8b98a3f4bd794d3d86b3cd02ece5053c7d97c not found: ID does not exist" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.787681 4721 scope.go:117] "RemoveContainer" containerID="948d27f8f981de9fc53c500c5677e51c7f4bc44a17c6c4ac29b3f8e45e86f2bf" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.788458 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dmhsg"] Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.819920 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl"] Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.819967 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-r49kl"] Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.821226 4721 scope.go:117] "RemoveContainer" containerID="948d27f8f981de9fc53c500c5677e51c7f4bc44a17c6c4ac29b3f8e45e86f2bf" Nov 21 13:48:40 crc kubenswrapper[4721]: E1121 13:48:40.829213 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"948d27f8f981de9fc53c500c5677e51c7f4bc44a17c6c4ac29b3f8e45e86f2bf\": container with ID starting with 948d27f8f981de9fc53c500c5677e51c7f4bc44a17c6c4ac29b3f8e45e86f2bf not found: ID does not exist" containerID="948d27f8f981de9fc53c500c5677e51c7f4bc44a17c6c4ac29b3f8e45e86f2bf" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.829264 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"948d27f8f981de9fc53c500c5677e51c7f4bc44a17c6c4ac29b3f8e45e86f2bf"} err="failed to get container status \"948d27f8f981de9fc53c500c5677e51c7f4bc44a17c6c4ac29b3f8e45e86f2bf\": rpc error: code = NotFound desc = could not find container \"948d27f8f981de9fc53c500c5677e51c7f4bc44a17c6c4ac29b3f8e45e86f2bf\": container with ID starting with 948d27f8f981de9fc53c500c5677e51c7f4bc44a17c6c4ac29b3f8e45e86f2bf not found: ID does not exist" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.992982 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="180dc212-b361-48e7-ad3e-70328538c4c6" path="/var/lib/kubelet/pods/180dc212-b361-48e7-ad3e-70328538c4c6/volumes" Nov 21 13:48:40 crc kubenswrapper[4721]: I1121 13:48:40.993746 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="305d9e46-16ee-4444-a4af-c58ac4c014f2" path="/var/lib/kubelet/pods/305d9e46-16ee-4444-a4af-c58ac4c014f2/volumes" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.116428 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p"] Nov 21 13:48:41 crc kubenswrapper[4721]: E1121 13:48:41.116777 4721 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="305d9e46-16ee-4444-a4af-c58ac4c014f2" containerName="controller-manager" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.116806 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="305d9e46-16ee-4444-a4af-c58ac4c014f2" containerName="controller-manager" Nov 21 13:48:41 crc kubenswrapper[4721]: E1121 13:48:41.116829 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="180dc212-b361-48e7-ad3e-70328538c4c6" containerName="route-controller-manager" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.116843 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="180dc212-b361-48e7-ad3e-70328538c4c6" containerName="route-controller-manager" Nov 21 13:48:41 crc kubenswrapper[4721]: E1121 13:48:41.116864 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="173c0bff-958a-4565-a578-9d62cef76971" containerName="collect-profiles" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.116878 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="173c0bff-958a-4565-a578-9d62cef76971" containerName="collect-profiles" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.117048 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="180dc212-b361-48e7-ad3e-70328538c4c6" containerName="route-controller-manager" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.117110 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="173c0bff-958a-4565-a578-9d62cef76971" containerName="collect-profiles" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.117130 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="305d9e46-16ee-4444-a4af-c58ac4c014f2" containerName="controller-manager" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.117735 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.122799 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.123746 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.123917 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.124133 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.124309 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.125192 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.126987 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx"] Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.127997 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.130267 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.130855 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.130870 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.131299 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.132158 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.132558 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.133338 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.137593 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p"] Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.146819 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx"] Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.242102 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6f1a151b-21be-4793-9363-6fa8e23e7193-serving-cert\") pod \"controller-manager-5c668f5c8b-pzx8p\" (UID: \"6f1a151b-21be-4793-9363-6fa8e23e7193\") " pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.242155 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6f1a151b-21be-4793-9363-6fa8e23e7193-client-ca\") pod \"controller-manager-5c668f5c8b-pzx8p\" (UID: \"6f1a151b-21be-4793-9363-6fa8e23e7193\") " pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.242178 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/896cce59-6731-481f-8842-85daff531781-config\") pod \"route-controller-manager-665c8f7986-l7vmx\" (UID: \"896cce59-6731-481f-8842-85daff531781\") " pod="openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.242197 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9x5jq\" (UniqueName: \"kubernetes.io/projected/896cce59-6731-481f-8842-85daff531781-kube-api-access-9x5jq\") pod \"route-controller-manager-665c8f7986-l7vmx\" (UID: \"896cce59-6731-481f-8842-85daff531781\") " 
pod="openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.242248 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/896cce59-6731-481f-8842-85daff531781-client-ca\") pod \"route-controller-manager-665c8f7986-l7vmx\" (UID: \"896cce59-6731-481f-8842-85daff531781\") " pod="openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.242277 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shx5g\" (UniqueName: \"kubernetes.io/projected/6f1a151b-21be-4793-9363-6fa8e23e7193-kube-api-access-shx5g\") pod \"controller-manager-5c668f5c8b-pzx8p\" (UID: \"6f1a151b-21be-4793-9363-6fa8e23e7193\") " pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.242358 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/896cce59-6731-481f-8842-85daff531781-serving-cert\") pod \"route-controller-manager-665c8f7986-l7vmx\" (UID: \"896cce59-6731-481f-8842-85daff531781\") " pod="openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.242459 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f1a151b-21be-4793-9363-6fa8e23e7193-config\") pod \"controller-manager-5c668f5c8b-pzx8p\" (UID: \"6f1a151b-21be-4793-9363-6fa8e23e7193\") " pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.242491 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6f1a151b-21be-4793-9363-6fa8e23e7193-proxy-ca-bundles\") pod \"controller-manager-5c668f5c8b-pzx8p\" (UID: \"6f1a151b-21be-4793-9363-6fa8e23e7193\") " pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.343557 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shx5g\" (UniqueName: \"kubernetes.io/projected/6f1a151b-21be-4793-9363-6fa8e23e7193-kube-api-access-shx5g\") pod \"controller-manager-5c668f5c8b-pzx8p\" (UID: \"6f1a151b-21be-4793-9363-6fa8e23e7193\") " pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.343634 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/896cce59-6731-481f-8842-85daff531781-serving-cert\") pod \"route-controller-manager-665c8f7986-l7vmx\" (UID: \"896cce59-6731-481f-8842-85daff531781\") " pod="openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.343668 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f1a151b-21be-4793-9363-6fa8e23e7193-config\") pod \"controller-manager-5c668f5c8b-pzx8p\" (UID: \"6f1a151b-21be-4793-9363-6fa8e23e7193\") " 
pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.343695 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6f1a151b-21be-4793-9363-6fa8e23e7193-proxy-ca-bundles\") pod \"controller-manager-5c668f5c8b-pzx8p\" (UID: \"6f1a151b-21be-4793-9363-6fa8e23e7193\") " pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.343734 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6f1a151b-21be-4793-9363-6fa8e23e7193-serving-cert\") pod \"controller-manager-5c668f5c8b-pzx8p\" (UID: \"6f1a151b-21be-4793-9363-6fa8e23e7193\") " pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.343758 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6f1a151b-21be-4793-9363-6fa8e23e7193-client-ca\") pod \"controller-manager-5c668f5c8b-pzx8p\" (UID: \"6f1a151b-21be-4793-9363-6fa8e23e7193\") " pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.343783 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/896cce59-6731-481f-8842-85daff531781-config\") pod \"route-controller-manager-665c8f7986-l7vmx\" (UID: \"896cce59-6731-481f-8842-85daff531781\") " pod="openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.343804 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9x5jq\" (UniqueName: \"kubernetes.io/projected/896cce59-6731-481f-8842-85daff531781-kube-api-access-9x5jq\") pod \"route-controller-manager-665c8f7986-l7vmx\" (UID: \"896cce59-6731-481f-8842-85daff531781\") " pod="openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.343839 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/896cce59-6731-481f-8842-85daff531781-client-ca\") pod \"route-controller-manager-665c8f7986-l7vmx\" (UID: \"896cce59-6731-481f-8842-85daff531781\") " pod="openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.345126 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/896cce59-6731-481f-8842-85daff531781-config\") pod \"route-controller-manager-665c8f7986-l7vmx\" (UID: \"896cce59-6731-481f-8842-85daff531781\") " pod="openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.345782 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/896cce59-6731-481f-8842-85daff531781-client-ca\") pod \"route-controller-manager-665c8f7986-l7vmx\" (UID: \"896cce59-6731-481f-8842-85daff531781\") " pod="openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.346237 4721 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f1a151b-21be-4793-9363-6fa8e23e7193-config\") pod \"controller-manager-5c668f5c8b-pzx8p\" (UID: \"6f1a151b-21be-4793-9363-6fa8e23e7193\") " pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.346273 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6f1a151b-21be-4793-9363-6fa8e23e7193-proxy-ca-bundles\") pod \"controller-manager-5c668f5c8b-pzx8p\" (UID: \"6f1a151b-21be-4793-9363-6fa8e23e7193\") " pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.346580 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6f1a151b-21be-4793-9363-6fa8e23e7193-client-ca\") pod \"controller-manager-5c668f5c8b-pzx8p\" (UID: \"6f1a151b-21be-4793-9363-6fa8e23e7193\") " pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.352332 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/896cce59-6731-481f-8842-85daff531781-serving-cert\") pod \"route-controller-manager-665c8f7986-l7vmx\" (UID: \"896cce59-6731-481f-8842-85daff531781\") " pod="openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.352422 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6f1a151b-21be-4793-9363-6fa8e23e7193-serving-cert\") pod \"controller-manager-5c668f5c8b-pzx8p\" (UID: \"6f1a151b-21be-4793-9363-6fa8e23e7193\") " pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.366138 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9x5jq\" (UniqueName: \"kubernetes.io/projected/896cce59-6731-481f-8842-85daff531781-kube-api-access-9x5jq\") pod \"route-controller-manager-665c8f7986-l7vmx\" (UID: \"896cce59-6731-481f-8842-85daff531781\") " pod="openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.375627 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shx5g\" (UniqueName: \"kubernetes.io/projected/6f1a151b-21be-4793-9363-6fa8e23e7193-kube-api-access-shx5g\") pod \"controller-manager-5c668f5c8b-pzx8p\" (UID: \"6f1a151b-21be-4793-9363-6fa8e23e7193\") " pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.458888 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.475456 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx" Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.740248 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx"] Nov 21 13:48:41 crc kubenswrapper[4721]: I1121 13:48:41.901646 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p"] Nov 21 13:48:41 crc kubenswrapper[4721]: W1121 13:48:41.912250 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f1a151b_21be_4793_9363_6fa8e23e7193.slice/crio-8fdaf81d4e3f212f45e91ac49c2c2df2112f40da824c70cb7b48a0fc110f041a WatchSource:0}: Error finding container 8fdaf81d4e3f212f45e91ac49c2c2df2112f40da824c70cb7b48a0fc110f041a: Status 404 returned error can't find the container with id 8fdaf81d4e3f212f45e91ac49c2c2df2112f40da824c70cb7b48a0fc110f041a Nov 21 13:48:42 crc kubenswrapper[4721]: I1121 13:48:42.737468 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx" event={"ID":"896cce59-6731-481f-8842-85daff531781","Type":"ContainerStarted","Data":"7ff060b5d05a71182c342e93191b5604f235bf2d9d2d6d188befa9702fc7f787"} Nov 21 13:48:42 crc kubenswrapper[4721]: I1121 13:48:42.738118 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx" event={"ID":"896cce59-6731-481f-8842-85daff531781","Type":"ContainerStarted","Data":"f1cf9914f9a8248de0a94b2506cd4845a1c316cff051c2b67938f467776bd4a8"} Nov 21 13:48:42 crc kubenswrapper[4721]: I1121 13:48:42.739640 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx" Nov 21 13:48:42 crc kubenswrapper[4721]: I1121 13:48:42.744015 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx" Nov 21 13:48:42 crc kubenswrapper[4721]: I1121 13:48:42.744202 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" event={"ID":"6f1a151b-21be-4793-9363-6fa8e23e7193","Type":"ContainerStarted","Data":"1a17996e16f2383e8b89e2c1e606d7d435a76933c815487bb67671b610431ddb"} Nov 21 13:48:42 crc kubenswrapper[4721]: I1121 13:48:42.744227 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" event={"ID":"6f1a151b-21be-4793-9363-6fa8e23e7193","Type":"ContainerStarted","Data":"8fdaf81d4e3f212f45e91ac49c2c2df2112f40da824c70cb7b48a0fc110f041a"} Nov 21 13:48:42 crc kubenswrapper[4721]: I1121 13:48:42.745138 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" Nov 21 13:48:42 crc kubenswrapper[4721]: I1121 13:48:42.749648 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" Nov 21 13:48:42 crc kubenswrapper[4721]: I1121 13:48:42.762052 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-665c8f7986-l7vmx" podStartSLOduration=3.762038016 
podStartE2EDuration="3.762038016s" podCreationTimestamp="2025-11-21 13:48:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:48:42.759781887 +0000 UTC m=+760.737673145" watchObservedRunningTime="2025-11-21 13:48:42.762038016 +0000 UTC m=+760.739929274" Nov 21 13:48:42 crc kubenswrapper[4721]: I1121 13:48:42.784118 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5c668f5c8b-pzx8p" podStartSLOduration=3.784081972 podStartE2EDuration="3.784081972s" podCreationTimestamp="2025-11-21 13:48:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:48:42.776637417 +0000 UTC m=+760.754528695" watchObservedRunningTime="2025-11-21 13:48:42.784081972 +0000 UTC m=+760.761973240" Nov 21 13:48:50 crc kubenswrapper[4721]: I1121 13:48:50.062744 4721 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 21 13:49:05 crc kubenswrapper[4721]: I1121 13:49:05.262435 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 13:49:05 crc kubenswrapper[4721]: I1121 13:49:05.263019 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 13:49:19 crc kubenswrapper[4721]: I1121 13:49:19.664359 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rc7kq"] Nov 21 13:49:19 crc kubenswrapper[4721]: I1121 13:49:19.666666 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rc7kq"
Nov 21 13:49:19 crc kubenswrapper[4721]: I1121 13:49:19.677723 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rc7kq"]
Nov 21 13:49:19 crc kubenswrapper[4721]: I1121 13:49:19.745081 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjp84\" (UniqueName: \"kubernetes.io/projected/4d1f8281-00d3-4b8c-a3fb-d9acc7562e52-kube-api-access-rjp84\") pod \"certified-operators-rc7kq\" (UID: \"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52\") " pod="openshift-marketplace/certified-operators-rc7kq"
Nov 21 13:49:19 crc kubenswrapper[4721]: I1121 13:49:19.745369 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d1f8281-00d3-4b8c-a3fb-d9acc7562e52-utilities\") pod \"certified-operators-rc7kq\" (UID: \"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52\") " pod="openshift-marketplace/certified-operators-rc7kq"
Nov 21 13:49:19 crc kubenswrapper[4721]: I1121 13:49:19.745527 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d1f8281-00d3-4b8c-a3fb-d9acc7562e52-catalog-content\") pod \"certified-operators-rc7kq\" (UID: \"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52\") " pod="openshift-marketplace/certified-operators-rc7kq"
Nov 21 13:49:19 crc kubenswrapper[4721]: I1121 13:49:19.847194 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjp84\" (UniqueName: \"kubernetes.io/projected/4d1f8281-00d3-4b8c-a3fb-d9acc7562e52-kube-api-access-rjp84\") pod \"certified-operators-rc7kq\" (UID: \"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52\") " pod="openshift-marketplace/certified-operators-rc7kq"
Nov 21 13:49:19 crc kubenswrapper[4721]: I1121 13:49:19.847272 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d1f8281-00d3-4b8c-a3fb-d9acc7562e52-utilities\") pod \"certified-operators-rc7kq\" (UID: \"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52\") " pod="openshift-marketplace/certified-operators-rc7kq"
Nov 21 13:49:19 crc kubenswrapper[4721]: I1121 13:49:19.847352 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d1f8281-00d3-4b8c-a3fb-d9acc7562e52-catalog-content\") pod \"certified-operators-rc7kq\" (UID: \"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52\") " pod="openshift-marketplace/certified-operators-rc7kq"
Nov 21 13:49:19 crc kubenswrapper[4721]: I1121 13:49:19.847895 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d1f8281-00d3-4b8c-a3fb-d9acc7562e52-catalog-content\") pod \"certified-operators-rc7kq\" (UID: \"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52\") " pod="openshift-marketplace/certified-operators-rc7kq"
Nov 21 13:49:19 crc kubenswrapper[4721]: I1121 13:49:19.848292 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d1f8281-00d3-4b8c-a3fb-d9acc7562e52-utilities\") pod \"certified-operators-rc7kq\" (UID: \"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52\") " pod="openshift-marketplace/certified-operators-rc7kq"
Nov 21 13:49:19 crc kubenswrapper[4721]: I1121 13:49:19.869105 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjp84\" (UniqueName: \"kubernetes.io/projected/4d1f8281-00d3-4b8c-a3fb-d9acc7562e52-kube-api-access-rjp84\") pod \"certified-operators-rc7kq\" (UID: \"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52\") " pod="openshift-marketplace/certified-operators-rc7kq"
Nov 21 13:49:20 crc kubenswrapper[4721]: I1121 13:49:20.007946 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rc7kq"
Nov 21 13:49:20 crc kubenswrapper[4721]: I1121 13:49:20.478904 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rc7kq"]
Nov 21 13:49:21 crc kubenswrapper[4721]: I1121 13:49:21.001654 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rc7kq" event={"ID":"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52","Type":"ContainerStarted","Data":"aeb6b80651a11c2e89f729562a8847d8f10deefd3f99a1cc13986eb93b1e26cc"}
Nov 21 13:49:22 crc kubenswrapper[4721]: I1121 13:49:22.011469 4721 generic.go:334] "Generic (PLEG): container finished" podID="4d1f8281-00d3-4b8c-a3fb-d9acc7562e52" containerID="1cc9a54b8df37412635689a84f791a2d12681876fad63cb8812dbfcfabf6f452" exitCode=0
Nov 21 13:49:22 crc kubenswrapper[4721]: I1121 13:49:22.011523 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rc7kq" event={"ID":"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52","Type":"ContainerDied","Data":"1cc9a54b8df37412635689a84f791a2d12681876fad63cb8812dbfcfabf6f452"}
Nov 21 13:49:22 crc kubenswrapper[4721]: I1121 13:49:22.015733 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 21 13:49:24 crc kubenswrapper[4721]: I1121 13:49:24.032695 4721 generic.go:334] "Generic (PLEG): container finished" podID="4d1f8281-00d3-4b8c-a3fb-d9acc7562e52" containerID="4288065f205b9e11b89e2c1aaeb8bfc4657f70a5398ad2ffba7a932cb6926c96" exitCode=0
Nov 21 13:49:24 crc kubenswrapper[4721]: I1121 13:49:24.032773 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rc7kq" event={"ID":"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52","Type":"ContainerDied","Data":"4288065f205b9e11b89e2c1aaeb8bfc4657f70a5398ad2ffba7a932cb6926c96"}
Nov 21 13:49:27 crc kubenswrapper[4721]: I1121 13:49:27.053254 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rc7kq" event={"ID":"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52","Type":"ContainerStarted","Data":"ca1b75e98f536baaf348f3891a2a10b3dc4086983a9ee5037a1e067adba6053b"}
Nov 21 13:49:28 crc kubenswrapper[4721]: I1121 13:49:28.083744 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rc7kq" podStartSLOduration=4.860531564 podStartE2EDuration="9.08368356s" podCreationTimestamp="2025-11-21 13:49:19 +0000 UTC" firstStartedPulling="2025-11-21 13:49:22.015338706 +0000 UTC m=+799.993229974" lastFinishedPulling="2025-11-21 13:49:26.238490692 +0000 UTC m=+804.216381970" observedRunningTime="2025-11-21 13:49:28.081719899 +0000 UTC m=+806.059611197" watchObservedRunningTime="2025-11-21 13:49:28.08368356 +0000 UTC m=+806.061574828"
Nov 21 13:49:30 crc kubenswrapper[4721]: I1121 13:49:30.008925 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rc7kq"
Nov 21 13:49:30 crc kubenswrapper[4721]: I1121 13:49:30.009038 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rc7kq"
Nov 21 13:49:30 crc kubenswrapper[4721]: I1121 13:49:30.053056 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rc7kq"
Nov 21 13:49:35 crc kubenswrapper[4721]: I1121 13:49:35.261894 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 21 13:49:35 crc kubenswrapper[4721]: I1121 13:49:35.262551 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 21 13:49:35 crc kubenswrapper[4721]: I1121 13:49:35.262636 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mstbk"
Nov 21 13:49:35 crc kubenswrapper[4721]: I1121 13:49:35.263582 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3c25faaf13321f5c86af730ca041b03706f375a923a1b990e773cef7516bc285"} pod="openshift-machine-config-operator/machine-config-daemon-mstbk" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 21 13:49:35 crc kubenswrapper[4721]: I1121 13:49:35.263665 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" containerID="cri-o://3c25faaf13321f5c86af730ca041b03706f375a923a1b990e773cef7516bc285" gracePeriod=600
Nov 21 13:49:36 crc kubenswrapper[4721]: I1121 13:49:36.110909 4721 generic.go:334] "Generic (PLEG): container finished" podID="fe3943c5-1349-4275-bc45-25db418c7488" containerID="3c25faaf13321f5c86af730ca041b03706f375a923a1b990e773cef7516bc285" exitCode=0
Nov 21 13:49:36 crc kubenswrapper[4721]: I1121 13:49:36.110963 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" event={"ID":"fe3943c5-1349-4275-bc45-25db418c7488","Type":"ContainerDied","Data":"3c25faaf13321f5c86af730ca041b03706f375a923a1b990e773cef7516bc285"}
Nov 21 13:49:36 crc kubenswrapper[4721]: I1121 13:49:36.111008 4721 scope.go:117] "RemoveContainer" containerID="2e0824dfbe9576b06ae1544acece2d07845bf0e4ae9bdf32618bab9f29b83047"
Nov 21 13:49:37 crc kubenswrapper[4721]: I1121 13:49:37.118990 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" event={"ID":"fe3943c5-1349-4275-bc45-25db418c7488","Type":"ContainerStarted","Data":"71a86cc2f09fdb3301718c24b412995cd6f1788d273d3c5f0145c7b6b096533f"}
Nov 21 13:49:40 crc kubenswrapper[4721]: I1121 13:49:40.048407 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rc7kq"
Nov 21 13:49:40 crc kubenswrapper[4721]: I1121 13:49:40.089725 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rc7kq"]
Nov 21 13:49:40 crc kubenswrapper[4721]: I1121 13:49:40.143782 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rc7kq" podUID="4d1f8281-00d3-4b8c-a3fb-d9acc7562e52" containerName="registry-server" containerID="cri-o://ca1b75e98f536baaf348f3891a2a10b3dc4086983a9ee5037a1e067adba6053b" gracePeriod=2
Nov 21 13:49:40 crc kubenswrapper[4721]: I1121 13:49:40.541055 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rc7kq"
Nov 21 13:49:40 crc kubenswrapper[4721]: I1121 13:49:40.718436 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjp84\" (UniqueName: \"kubernetes.io/projected/4d1f8281-00d3-4b8c-a3fb-d9acc7562e52-kube-api-access-rjp84\") pod \"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52\" (UID: \"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52\") "
Nov 21 13:49:40 crc kubenswrapper[4721]: I1121 13:49:40.718824 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d1f8281-00d3-4b8c-a3fb-d9acc7562e52-catalog-content\") pod \"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52\" (UID: \"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52\") "
Nov 21 13:49:40 crc kubenswrapper[4721]: I1121 13:49:40.718859 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d1f8281-00d3-4b8c-a3fb-d9acc7562e52-utilities\") pod \"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52\" (UID: \"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52\") "
Nov 21 13:49:40 crc kubenswrapper[4721]: I1121 13:49:40.719692 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d1f8281-00d3-4b8c-a3fb-d9acc7562e52-utilities" (OuterVolumeSpecName: "utilities") pod "4d1f8281-00d3-4b8c-a3fb-d9acc7562e52" (UID: "4d1f8281-00d3-4b8c-a3fb-d9acc7562e52"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 13:49:40 crc kubenswrapper[4721]: I1121 13:49:40.723740 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d1f8281-00d3-4b8c-a3fb-d9acc7562e52-kube-api-access-rjp84" (OuterVolumeSpecName: "kube-api-access-rjp84") pod "4d1f8281-00d3-4b8c-a3fb-d9acc7562e52" (UID: "4d1f8281-00d3-4b8c-a3fb-d9acc7562e52"). InnerVolumeSpecName "kube-api-access-rjp84". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 13:49:40 crc kubenswrapper[4721]: I1121 13:49:40.765917 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d1f8281-00d3-4b8c-a3fb-d9acc7562e52-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4d1f8281-00d3-4b8c-a3fb-d9acc7562e52" (UID: "4d1f8281-00d3-4b8c-a3fb-d9acc7562e52"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 13:49:40 crc kubenswrapper[4721]: I1121 13:49:40.820650 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjp84\" (UniqueName: \"kubernetes.io/projected/4d1f8281-00d3-4b8c-a3fb-d9acc7562e52-kube-api-access-rjp84\") on node \"crc\" DevicePath \"\""
Nov 21 13:49:40 crc kubenswrapper[4721]: I1121 13:49:40.820693 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d1f8281-00d3-4b8c-a3fb-d9acc7562e52-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 21 13:49:40 crc kubenswrapper[4721]: I1121 13:49:40.820737 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d1f8281-00d3-4b8c-a3fb-d9acc7562e52-utilities\") on node \"crc\" DevicePath \"\""
Nov 21 13:49:41 crc kubenswrapper[4721]: I1121 13:49:41.151044 4721 generic.go:334] "Generic (PLEG): container finished" podID="4d1f8281-00d3-4b8c-a3fb-d9acc7562e52" containerID="ca1b75e98f536baaf348f3891a2a10b3dc4086983a9ee5037a1e067adba6053b" exitCode=0
Nov 21 13:49:41 crc kubenswrapper[4721]: I1121 13:49:41.151139 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rc7kq"
Nov 21 13:49:41 crc kubenswrapper[4721]: I1121 13:49:41.151158 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rc7kq" event={"ID":"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52","Type":"ContainerDied","Data":"ca1b75e98f536baaf348f3891a2a10b3dc4086983a9ee5037a1e067adba6053b"}
Nov 21 13:49:41 crc kubenswrapper[4721]: I1121 13:49:41.152677 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rc7kq" event={"ID":"4d1f8281-00d3-4b8c-a3fb-d9acc7562e52","Type":"ContainerDied","Data":"aeb6b80651a11c2e89f729562a8847d8f10deefd3f99a1cc13986eb93b1e26cc"}
Nov 21 13:49:41 crc kubenswrapper[4721]: I1121 13:49:41.152708 4721 scope.go:117] "RemoveContainer" containerID="ca1b75e98f536baaf348f3891a2a10b3dc4086983a9ee5037a1e067adba6053b"
Nov 21 13:49:41 crc kubenswrapper[4721]: I1121 13:49:41.179611 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rc7kq"]
Nov 21 13:49:41 crc kubenswrapper[4721]: I1121 13:49:41.179730 4721 scope.go:117] "RemoveContainer" containerID="4288065f205b9e11b89e2c1aaeb8bfc4657f70a5398ad2ffba7a932cb6926c96"
Nov 21 13:49:41 crc kubenswrapper[4721]: I1121 13:49:41.181351 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rc7kq"]
Nov 21 13:49:41 crc kubenswrapper[4721]: I1121 13:49:41.198974 4721 scope.go:117] "RemoveContainer" containerID="1cc9a54b8df37412635689a84f791a2d12681876fad63cb8812dbfcfabf6f452"
Nov 21 13:49:41 crc kubenswrapper[4721]: I1121 13:49:41.212716 4721 scope.go:117] "RemoveContainer" containerID="ca1b75e98f536baaf348f3891a2a10b3dc4086983a9ee5037a1e067adba6053b"
Nov 21 13:49:41 crc kubenswrapper[4721]: E1121 13:49:41.213285 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca1b75e98f536baaf348f3891a2a10b3dc4086983a9ee5037a1e067adba6053b\": container with ID starting with ca1b75e98f536baaf348f3891a2a10b3dc4086983a9ee5037a1e067adba6053b not found: ID does not exist" containerID="ca1b75e98f536baaf348f3891a2a10b3dc4086983a9ee5037a1e067adba6053b"
Nov 21 13:49:41 crc kubenswrapper[4721]: I1121 13:49:41.213332 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca1b75e98f536baaf348f3891a2a10b3dc4086983a9ee5037a1e067adba6053b"} err="failed to get container status \"ca1b75e98f536baaf348f3891a2a10b3dc4086983a9ee5037a1e067adba6053b\": rpc error: code = NotFound desc = could not find container \"ca1b75e98f536baaf348f3891a2a10b3dc4086983a9ee5037a1e067adba6053b\": container with ID starting with ca1b75e98f536baaf348f3891a2a10b3dc4086983a9ee5037a1e067adba6053b not found: ID does not exist"
Nov 21 13:49:41 crc kubenswrapper[4721]: I1121 13:49:41.213361 4721 scope.go:117] "RemoveContainer" containerID="4288065f205b9e11b89e2c1aaeb8bfc4657f70a5398ad2ffba7a932cb6926c96"
Nov 21 13:49:41 crc kubenswrapper[4721]: E1121 13:49:41.213711 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4288065f205b9e11b89e2c1aaeb8bfc4657f70a5398ad2ffba7a932cb6926c96\": container with ID starting with 4288065f205b9e11b89e2c1aaeb8bfc4657f70a5398ad2ffba7a932cb6926c96 not found: ID does not exist" containerID="4288065f205b9e11b89e2c1aaeb8bfc4657f70a5398ad2ffba7a932cb6926c96"
Nov 21 13:49:41 crc kubenswrapper[4721]: I1121 13:49:41.213748 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4288065f205b9e11b89e2c1aaeb8bfc4657f70a5398ad2ffba7a932cb6926c96"} err="failed to get container status \"4288065f205b9e11b89e2c1aaeb8bfc4657f70a5398ad2ffba7a932cb6926c96\": rpc error: code = NotFound desc = could not find container \"4288065f205b9e11b89e2c1aaeb8bfc4657f70a5398ad2ffba7a932cb6926c96\": container with ID starting with 4288065f205b9e11b89e2c1aaeb8bfc4657f70a5398ad2ffba7a932cb6926c96 not found: ID does not exist"
Nov 21 13:49:41 crc kubenswrapper[4721]: I1121 13:49:41.213771 4721 scope.go:117] "RemoveContainer" containerID="1cc9a54b8df37412635689a84f791a2d12681876fad63cb8812dbfcfabf6f452"
Nov 21 13:49:41 crc kubenswrapper[4721]: E1121 13:49:41.214119 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cc9a54b8df37412635689a84f791a2d12681876fad63cb8812dbfcfabf6f452\": container with ID starting with 1cc9a54b8df37412635689a84f791a2d12681876fad63cb8812dbfcfabf6f452 not found: ID does not exist" containerID="1cc9a54b8df37412635689a84f791a2d12681876fad63cb8812dbfcfabf6f452"
Nov 21 13:49:41 crc kubenswrapper[4721]: I1121 13:49:41.214265 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cc9a54b8df37412635689a84f791a2d12681876fad63cb8812dbfcfabf6f452"} err="failed to get container status \"1cc9a54b8df37412635689a84f791a2d12681876fad63cb8812dbfcfabf6f452\": rpc error: code = NotFound desc = could not find container \"1cc9a54b8df37412635689a84f791a2d12681876fad63cb8812dbfcfabf6f452\": container with ID starting with 1cc9a54b8df37412635689a84f791a2d12681876fad63cb8812dbfcfabf6f452 not found: ID does not exist"
Nov 21 13:49:42 crc kubenswrapper[4721]: I1121 13:49:42.993054 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d1f8281-00d3-4b8c-a3fb-d9acc7562e52" path="/var/lib/kubelet/pods/4d1f8281-00d3-4b8c-a3fb-d9acc7562e52/volumes"
Nov 21 13:49:51 crc kubenswrapper[4721]: I1121 13:49:51.321279 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gpb6f"]
Nov 21 13:49:51 crc kubenswrapper[4721]: E1121 13:49:51.321952 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d1f8281-00d3-4b8c-a3fb-d9acc7562e52" containerName="extract-utilities"
Nov 21 13:49:51 crc kubenswrapper[4721]: I1121 13:49:51.321964 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d1f8281-00d3-4b8c-a3fb-d9acc7562e52" containerName="extract-utilities"
Nov 21 13:49:51 crc kubenswrapper[4721]: E1121 13:49:51.321981 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d1f8281-00d3-4b8c-a3fb-d9acc7562e52" containerName="registry-server"
Nov 21 13:49:51 crc kubenswrapper[4721]: I1121 13:49:51.321987 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d1f8281-00d3-4b8c-a3fb-d9acc7562e52" containerName="registry-server"
Nov 21 13:49:51 crc kubenswrapper[4721]: E1121 13:49:51.322001 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d1f8281-00d3-4b8c-a3fb-d9acc7562e52" containerName="extract-content"
Nov 21 13:49:51 crc kubenswrapper[4721]: I1121 13:49:51.322007 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d1f8281-00d3-4b8c-a3fb-d9acc7562e52" containerName="extract-content"
Nov 21 13:49:51 crc kubenswrapper[4721]: I1121 13:49:51.322124 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d1f8281-00d3-4b8c-a3fb-d9acc7562e52" containerName="registry-server"
Nov 21 13:49:51 crc kubenswrapper[4721]: I1121 13:49:51.322777 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gpb6f"
Nov 21 13:49:51 crc kubenswrapper[4721]: I1121 13:49:51.337722 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gpb6f"]
Nov 21 13:49:51 crc kubenswrapper[4721]: I1121 13:49:51.471533 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phf26\" (UniqueName: \"kubernetes.io/projected/94e44784-4789-4b36-b6af-a666bf8791b0-kube-api-access-phf26\") pod \"redhat-marketplace-gpb6f\" (UID: \"94e44784-4789-4b36-b6af-a666bf8791b0\") " pod="openshift-marketplace/redhat-marketplace-gpb6f"
Nov 21 13:49:51 crc kubenswrapper[4721]: I1121 13:49:51.471590 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94e44784-4789-4b36-b6af-a666bf8791b0-catalog-content\") pod \"redhat-marketplace-gpb6f\" (UID: \"94e44784-4789-4b36-b6af-a666bf8791b0\") " pod="openshift-marketplace/redhat-marketplace-gpb6f"
Nov 21 13:49:51 crc kubenswrapper[4721]: I1121 13:49:51.471636 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94e44784-4789-4b36-b6af-a666bf8791b0-utilities\") pod \"redhat-marketplace-gpb6f\" (UID: \"94e44784-4789-4b36-b6af-a666bf8791b0\") " pod="openshift-marketplace/redhat-marketplace-gpb6f"
Nov 21 13:49:51 crc kubenswrapper[4721]: I1121 13:49:51.573444 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phf26\" (UniqueName: \"kubernetes.io/projected/94e44784-4789-4b36-b6af-a666bf8791b0-kube-api-access-phf26\") pod \"redhat-marketplace-gpb6f\" (UID: \"94e44784-4789-4b36-b6af-a666bf8791b0\") " pod="openshift-marketplace/redhat-marketplace-gpb6f"
Nov 21 13:49:51 crc kubenswrapper[4721]: I1121 13:49:51.573542 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94e44784-4789-4b36-b6af-a666bf8791b0-catalog-content\") pod \"redhat-marketplace-gpb6f\" (UID: \"94e44784-4789-4b36-b6af-a666bf8791b0\") " pod="openshift-marketplace/redhat-marketplace-gpb6f"
Nov 21 13:49:51 crc kubenswrapper[4721]: I1121 13:49:51.573612 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94e44784-4789-4b36-b6af-a666bf8791b0-utilities\") pod \"redhat-marketplace-gpb6f\" (UID: \"94e44784-4789-4b36-b6af-a666bf8791b0\") " pod="openshift-marketplace/redhat-marketplace-gpb6f"
Nov 21 13:49:51 crc kubenswrapper[4721]: I1121 13:49:51.574161 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94e44784-4789-4b36-b6af-a666bf8791b0-catalog-content\") pod \"redhat-marketplace-gpb6f\" (UID: \"94e44784-4789-4b36-b6af-a666bf8791b0\") " pod="openshift-marketplace/redhat-marketplace-gpb6f"
Nov 21 13:49:51 crc kubenswrapper[4721]: I1121 13:49:51.574234 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94e44784-4789-4b36-b6af-a666bf8791b0-utilities\") pod \"redhat-marketplace-gpb6f\" (UID: \"94e44784-4789-4b36-b6af-a666bf8791b0\") " pod="openshift-marketplace/redhat-marketplace-gpb6f"
Nov 21 13:49:51 crc kubenswrapper[4721]: I1121 13:49:51.605460 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phf26\" (UniqueName: \"kubernetes.io/projected/94e44784-4789-4b36-b6af-a666bf8791b0-kube-api-access-phf26\") pod \"redhat-marketplace-gpb6f\" (UID: \"94e44784-4789-4b36-b6af-a666bf8791b0\") " pod="openshift-marketplace/redhat-marketplace-gpb6f"
Nov 21 13:49:51 crc kubenswrapper[4721]: I1121 13:49:51.677132 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gpb6f"
Nov 21 13:49:52 crc kubenswrapper[4721]: I1121 13:49:52.079773 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gpb6f"]
Nov 21 13:49:52 crc kubenswrapper[4721]: W1121 13:49:52.084896 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod94e44784_4789_4b36_b6af_a666bf8791b0.slice/crio-e19ef25cadef152ba506b9c13bee8400a52b5d7b28fc0106ddc9e0b15c460477 WatchSource:0}: Error finding container e19ef25cadef152ba506b9c13bee8400a52b5d7b28fc0106ddc9e0b15c460477: Status 404 returned error can't find the container with id e19ef25cadef152ba506b9c13bee8400a52b5d7b28fc0106ddc9e0b15c460477
Nov 21 13:49:52 crc kubenswrapper[4721]: I1121 13:49:52.215101 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gpb6f" event={"ID":"94e44784-4789-4b36-b6af-a666bf8791b0","Type":"ContainerStarted","Data":"e19ef25cadef152ba506b9c13bee8400a52b5d7b28fc0106ddc9e0b15c460477"}
Nov 21 13:49:53 crc kubenswrapper[4721]: I1121 13:49:53.226995 4721 generic.go:334] "Generic (PLEG): container finished" podID="94e44784-4789-4b36-b6af-a666bf8791b0" containerID="f0884b98e830bb59d4c9396d93aba71b19b5a569cc4c9771575be7332083d64f" exitCode=0
Nov 21 13:49:53 crc kubenswrapper[4721]: I1121 13:49:53.227115 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gpb6f" event={"ID":"94e44784-4789-4b36-b6af-a666bf8791b0","Type":"ContainerDied","Data":"f0884b98e830bb59d4c9396d93aba71b19b5a569cc4c9771575be7332083d64f"}
Nov 21 13:49:55 crc kubenswrapper[4721]: I1121 13:49:55.266704 4721 generic.go:334] "Generic (PLEG): container finished" podID="94e44784-4789-4b36-b6af-a666bf8791b0" containerID="1711d191e20f6b4d7574eee9fa0b50e0f37e50e1c6d27e72f436ea071e6d8456" exitCode=0
Nov 21 13:49:55 crc kubenswrapper[4721]: I1121 13:49:55.266819 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gpb6f" event={"ID":"94e44784-4789-4b36-b6af-a666bf8791b0","Type":"ContainerDied","Data":"1711d191e20f6b4d7574eee9fa0b50e0f37e50e1c6d27e72f436ea071e6d8456"}
Nov 21 13:49:57 crc kubenswrapper[4721]: I1121 13:49:57.796694 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gpb6f" event={"ID":"94e44784-4789-4b36-b6af-a666bf8791b0","Type":"ContainerStarted","Data":"efbc17c30a3682ef4437d5193a7b235cf8c7e0a67fb17f913be7eb46f0863f97"}
Nov 21 13:49:57 crc kubenswrapper[4721]: I1121 13:49:57.827054 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gpb6f" podStartSLOduration=2.762354849 podStartE2EDuration="6.827030575s" podCreationTimestamp="2025-11-21 13:49:51 +0000 UTC" firstStartedPulling="2025-11-21 13:49:53.229528779 +0000 UTC m=+831.207420037" lastFinishedPulling="2025-11-21 13:49:57.294204465 +0000 UTC m=+835.272095763" observedRunningTime="2025-11-21 13:49:57.823926133 +0000 UTC m=+835.801817421" watchObservedRunningTime="2025-11-21 13:49:57.827030575 +0000 UTC m=+835.804921843"
Nov 21 13:49:59 crc kubenswrapper[4721]: I1121 13:49:59.072750 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-s6g8d"]
Nov 21 13:49:59 crc kubenswrapper[4721]: I1121 13:49:59.073906 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s6g8d"
Nov 21 13:49:59 crc kubenswrapper[4721]: I1121 13:49:59.087133 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s6g8d"]
Nov 21 13:49:59 crc kubenswrapper[4721]: I1121 13:49:59.198739 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc27d915-7f27-4433-a93f-54115b72bc0b-utilities\") pod \"redhat-operators-s6g8d\" (UID: \"dc27d915-7f27-4433-a93f-54115b72bc0b\") " pod="openshift-marketplace/redhat-operators-s6g8d"
Nov 21 13:49:59 crc kubenswrapper[4721]: I1121 13:49:59.199148 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc27d915-7f27-4433-a93f-54115b72bc0b-catalog-content\") pod \"redhat-operators-s6g8d\" (UID: \"dc27d915-7f27-4433-a93f-54115b72bc0b\") " pod="openshift-marketplace/redhat-operators-s6g8d"
Nov 21 13:49:59 crc kubenswrapper[4721]: I1121 13:49:59.199247 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9pgm\" (UniqueName: \"kubernetes.io/projected/dc27d915-7f27-4433-a93f-54115b72bc0b-kube-api-access-h9pgm\") pod \"redhat-operators-s6g8d\" (UID: \"dc27d915-7f27-4433-a93f-54115b72bc0b\") " pod="openshift-marketplace/redhat-operators-s6g8d"
Nov 21 13:49:59 crc kubenswrapper[4721]: I1121 13:49:59.300331 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc27d915-7f27-4433-a93f-54115b72bc0b-catalog-content\") pod \"redhat-operators-s6g8d\" (UID: \"dc27d915-7f27-4433-a93f-54115b72bc0b\") " pod="openshift-marketplace/redhat-operators-s6g8d"
Nov 21 13:49:59 crc kubenswrapper[4721]: I1121 13:49:59.300419 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9pgm\" (UniqueName: \"kubernetes.io/projected/dc27d915-7f27-4433-a93f-54115b72bc0b-kube-api-access-h9pgm\") pod \"redhat-operators-s6g8d\" (UID: \"dc27d915-7f27-4433-a93f-54115b72bc0b\") " pod="openshift-marketplace/redhat-operators-s6g8d"
Nov 21 13:49:59 crc kubenswrapper[4721]: I1121 13:49:59.300468 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc27d915-7f27-4433-a93f-54115b72bc0b-utilities\") pod \"redhat-operators-s6g8d\" (UID: \"dc27d915-7f27-4433-a93f-54115b72bc0b\") " pod="openshift-marketplace/redhat-operators-s6g8d"
Nov 21 13:49:59 crc kubenswrapper[4721]: I1121 13:49:59.300864 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc27d915-7f27-4433-a93f-54115b72bc0b-catalog-content\") pod \"redhat-operators-s6g8d\" (UID: \"dc27d915-7f27-4433-a93f-54115b72bc0b\") " pod="openshift-marketplace/redhat-operators-s6g8d"
Nov 21 13:49:59 crc kubenswrapper[4721]: I1121 13:49:59.301003 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc27d915-7f27-4433-a93f-54115b72bc0b-utilities\") pod \"redhat-operators-s6g8d\" (UID: \"dc27d915-7f27-4433-a93f-54115b72bc0b\") " pod="openshift-marketplace/redhat-operators-s6g8d"
Nov 21 13:49:59 crc kubenswrapper[4721]: I1121 13:49:59.318718 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9pgm\" (UniqueName: \"kubernetes.io/projected/dc27d915-7f27-4433-a93f-54115b72bc0b-kube-api-access-h9pgm\") pod \"redhat-operators-s6g8d\" (UID: \"dc27d915-7f27-4433-a93f-54115b72bc0b\") " pod="openshift-marketplace/redhat-operators-s6g8d"
Nov 21 13:49:59 crc kubenswrapper[4721]: I1121 13:49:59.432671 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s6g8d"
Nov 21 13:49:59 crc kubenswrapper[4721]: I1121 13:49:59.848031 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s6g8d"]
Nov 21 13:50:00 crc kubenswrapper[4721]: I1121 13:50:00.816417 4721 generic.go:334] "Generic (PLEG): container finished" podID="dc27d915-7f27-4433-a93f-54115b72bc0b" containerID="f21f877ef7303ed1a29f35e7f6367f1311de6f7720f7bc697c3834b840fd7c43" exitCode=0
Nov 21 13:50:00 crc kubenswrapper[4721]: I1121 13:50:00.816538 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s6g8d" event={"ID":"dc27d915-7f27-4433-a93f-54115b72bc0b","Type":"ContainerDied","Data":"f21f877ef7303ed1a29f35e7f6367f1311de6f7720f7bc697c3834b840fd7c43"}
Nov 21 13:50:00 crc kubenswrapper[4721]: I1121 13:50:00.816925 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s6g8d" event={"ID":"dc27d915-7f27-4433-a93f-54115b72bc0b","Type":"ContainerStarted","Data":"7651d7f582eb0efc7557ae69e21978c3efa7aa6d9e88b4e9b3d71b1c5979e953"}
Nov 21 13:50:01 crc kubenswrapper[4721]: I1121 13:50:01.677795 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gpb6f"
Nov 21 13:50:01 crc kubenswrapper[4721]: I1121 13:50:01.678323 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gpb6f"
Nov 21 13:50:01 crc kubenswrapper[4721]: I1121 13:50:01.762263 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gpb6f"
Nov 21 13:50:02 crc kubenswrapper[4721]: I1121 13:50:02.897236 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gpb6f"
Nov 21 13:50:03 crc kubenswrapper[4721]: I1121 13:50:03.455378 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gpb6f"]
Nov 21 13:50:04 crc kubenswrapper[4721]: I1121 13:50:04.838322 4721 generic.go:334] "Generic (PLEG): container finished" podID="dc27d915-7f27-4433-a93f-54115b72bc0b" containerID="e7792b799c2a37c230b914685b828ebf00876ddea049796997b739b295ee705a" exitCode=0
Nov 21 13:50:04 crc kubenswrapper[4721]: I1121 13:50:04.838387 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s6g8d" event={"ID":"dc27d915-7f27-4433-a93f-54115b72bc0b","Type":"ContainerDied","Data":"e7792b799c2a37c230b914685b828ebf00876ddea049796997b739b295ee705a"}
Nov 21 13:50:04 crc kubenswrapper[4721]: I1121 13:50:04.838805 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gpb6f" podUID="94e44784-4789-4b36-b6af-a666bf8791b0" containerName="registry-server" containerID="cri-o://efbc17c30a3682ef4437d5193a7b235cf8c7e0a67fb17f913be7eb46f0863f97" gracePeriod=2
Nov 21 13:50:05 crc kubenswrapper[4721]: I1121 13:50:05.847297 4721 generic.go:334] "Generic (PLEG): container finished" podID="94e44784-4789-4b36-b6af-a666bf8791b0" containerID="efbc17c30a3682ef4437d5193a7b235cf8c7e0a67fb17f913be7eb46f0863f97" exitCode=0
Nov 21 13:50:05 crc kubenswrapper[4721]: I1121 13:50:05.847354 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gpb6f" event={"ID":"94e44784-4789-4b36-b6af-a666bf8791b0","Type":"ContainerDied","Data":"efbc17c30a3682ef4437d5193a7b235cf8c7e0a67fb17f913be7eb46f0863f97"}
Nov 21 13:50:06 crc kubenswrapper[4721]: I1121 13:50:06.843941 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gpb6f"
Nov 21 13:50:06 crc kubenswrapper[4721]: I1121 13:50:06.855451 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gpb6f" event={"ID":"94e44784-4789-4b36-b6af-a666bf8791b0","Type":"ContainerDied","Data":"e19ef25cadef152ba506b9c13bee8400a52b5d7b28fc0106ddc9e0b15c460477"}
Nov 21 13:50:06 crc kubenswrapper[4721]: I1121 13:50:06.855519 4721 scope.go:117] "RemoveContainer" containerID="efbc17c30a3682ef4437d5193a7b235cf8c7e0a67fb17f913be7eb46f0863f97"
Nov 21 13:50:06 crc kubenswrapper[4721]: I1121 13:50:06.855677 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gpb6f"
Nov 21 13:50:06 crc kubenswrapper[4721]: I1121 13:50:06.884701 4721 scope.go:117] "RemoveContainer" containerID="1711d191e20f6b4d7574eee9fa0b50e0f37e50e1c6d27e72f436ea071e6d8456"
Nov 21 13:50:06 crc kubenswrapper[4721]: I1121 13:50:06.897343 4721 scope.go:117] "RemoveContainer" containerID="f0884b98e830bb59d4c9396d93aba71b19b5a569cc4c9771575be7332083d64f"
Nov 21 13:50:06 crc kubenswrapper[4721]: I1121 13:50:06.911394 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phf26\" (UniqueName: \"kubernetes.io/projected/94e44784-4789-4b36-b6af-a666bf8791b0-kube-api-access-phf26\") pod \"94e44784-4789-4b36-b6af-a666bf8791b0\" (UID: \"94e44784-4789-4b36-b6af-a666bf8791b0\") "
Nov 21 13:50:06 crc kubenswrapper[4721]: I1121 13:50:06.911480 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94e44784-4789-4b36-b6af-a666bf8791b0-catalog-content\") pod \"94e44784-4789-4b36-b6af-a666bf8791b0\" (UID: \"94e44784-4789-4b36-b6af-a666bf8791b0\") "
Nov 21 13:50:06 crc kubenswrapper[4721]: I1121 13:50:06.912022 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94e44784-4789-4b36-b6af-a666bf8791b0-utilities\") pod \"94e44784-4789-4b36-b6af-a666bf8791b0\" (UID: \"94e44784-4789-4b36-b6af-a666bf8791b0\") "
Nov 21 13:50:06 crc kubenswrapper[4721]: I1121 13:50:06.912770 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94e44784-4789-4b36-b6af-a666bf8791b0-utilities" (OuterVolumeSpecName: "utilities") pod "94e44784-4789-4b36-b6af-a666bf8791b0" (UID: "94e44784-4789-4b36-b6af-a666bf8791b0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 13:50:06 crc kubenswrapper[4721]: I1121 13:50:06.916161 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94e44784-4789-4b36-b6af-a666bf8791b0-utilities\") on node \"crc\" DevicePath \"\""
Nov 21 13:50:06 crc kubenswrapper[4721]: I1121 13:50:06.920654 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94e44784-4789-4b36-b6af-a666bf8791b0-kube-api-access-phf26" (OuterVolumeSpecName: "kube-api-access-phf26") pod "94e44784-4789-4b36-b6af-a666bf8791b0" (UID: "94e44784-4789-4b36-b6af-a666bf8791b0"). InnerVolumeSpecName "kube-api-access-phf26". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 13:50:06 crc kubenswrapper[4721]: I1121 13:50:06.937698 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94e44784-4789-4b36-b6af-a666bf8791b0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "94e44784-4789-4b36-b6af-a666bf8791b0" (UID: "94e44784-4789-4b36-b6af-a666bf8791b0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 13:50:07 crc kubenswrapper[4721]: I1121 13:50:07.017754 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phf26\" (UniqueName: \"kubernetes.io/projected/94e44784-4789-4b36-b6af-a666bf8791b0-kube-api-access-phf26\") on node \"crc\" DevicePath \"\""
Nov 21 13:50:07 crc kubenswrapper[4721]: I1121 13:50:07.017786 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94e44784-4789-4b36-b6af-a666bf8791b0-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 21 13:50:07 crc kubenswrapper[4721]: I1121 13:50:07.183196 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gpb6f"]
Nov 21 13:50:07 crc kubenswrapper[4721]: I1121 13:50:07.189145 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gpb6f"]
Nov 21 13:50:07 crc kubenswrapper[4721]: I1121 13:50:07.864545 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s6g8d" event={"ID":"dc27d915-7f27-4433-a93f-54115b72bc0b","Type":"ContainerStarted","Data":"4e993c585f53aedc0589997753d49efb946b7f694a97addc177451a8027590e1"}
Nov 21 13:50:07 crc kubenswrapper[4721]: I1121 13:50:07.883997 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-s6g8d" podStartSLOduration=2.143425033 podStartE2EDuration="8.883979203s" podCreationTimestamp="2025-11-21 13:49:59 +0000 UTC" firstStartedPulling="2025-11-21 13:50:00.820751563 +0000 UTC m=+838.798642851" lastFinishedPulling="2025-11-21 13:50:07.561305753 +0000 UTC m=+845.539197021" observedRunningTime="2025-11-21 13:50:07.88120489 +0000 UTC m=+845.859096178" watchObservedRunningTime="2025-11-21 13:50:07.883979203 +0000 UTC m=+845.861870461"
Nov 21 13:50:08 crc kubenswrapper[4721]: I1121 13:50:08.988472 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94e44784-4789-4b36-b6af-a666bf8791b0" path="/var/lib/kubelet/pods/94e44784-4789-4b36-b6af-a666bf8791b0/volumes"
Nov 21 13:50:09 crc kubenswrapper[4721]: I1121 13:50:09.433284 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-s6g8d"
Nov 21 13:50:09 crc kubenswrapper[4721]: I1121 13:50:09.433355 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-s6g8d"
Nov 21 13:50:10 crc kubenswrapper[4721]: I1121 13:50:10.500688 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-s6g8d" podUID="dc27d915-7f27-4433-a93f-54115b72bc0b" containerName="registry-server" probeResult="failure" output=<
Nov 21 13:50:10 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s
Nov 21 13:50:10 crc kubenswrapper[4721]: >
Nov 21 13:50:12 crc kubenswrapper[4721]: I1121 13:50:12.315771 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-frn77"]
Nov 21 13:50:12 crc kubenswrapper[4721]: E1121 13:50:12.316477 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94e44784-4789-4b36-b6af-a666bf8791b0" containerName="registry-server"
Nov 21 13:50:12 crc kubenswrapper[4721]: I1121 13:50:12.316500 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="94e44784-4789-4b36-b6af-a666bf8791b0" containerName="registry-server"
Nov 21 13:50:12 crc kubenswrapper[4721]: E1121 13:50:12.316529 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94e44784-4789-4b36-b6af-a666bf8791b0" containerName="extract-utilities"
Nov 21 13:50:12 crc kubenswrapper[4721]: I1121 13:50:12.316542 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="94e44784-4789-4b36-b6af-a666bf8791b0" containerName="extract-utilities"
Nov 21 13:50:12 crc kubenswrapper[4721]: E1121 13:50:12.316564 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94e44784-4789-4b36-b6af-a666bf8791b0" containerName="extract-content"
Nov 21 13:50:12 crc kubenswrapper[4721]: I1121 13:50:12.316579 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="94e44784-4789-4b36-b6af-a666bf8791b0" containerName="extract-content"
Nov 21 13:50:12 crc kubenswrapper[4721]: I1121 13:50:12.316771 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="94e44784-4789-4b36-b6af-a666bf8791b0" containerName="registry-server"
Nov 21 13:50:12 crc kubenswrapper[4721]: I1121 13:50:12.318312 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-frn77"
Nov 21 13:50:12 crc kubenswrapper[4721]: I1121 13:50:12.338605 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-frn77"]
Nov 21 13:50:12 crc kubenswrapper[4721]: I1121 13:50:12.396993 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b78bc30-b79c-4091-8027-f430db02b2de-catalog-content\") pod \"community-operators-frn77\" (UID: \"3b78bc30-b79c-4091-8027-f430db02b2de\") " pod="openshift-marketplace/community-operators-frn77"
Nov 21 13:50:12 crc kubenswrapper[4721]: I1121 13:50:12.397204 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvs4m\" (UniqueName: \"kubernetes.io/projected/3b78bc30-b79c-4091-8027-f430db02b2de-kube-api-access-nvs4m\") pod \"community-operators-frn77\" (UID: \"3b78bc30-b79c-4091-8027-f430db02b2de\") " pod="openshift-marketplace/community-operators-frn77"
Nov 21 13:50:12 crc kubenswrapper[4721]: I1121 13:50:12.397284 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b78bc30-b79c-4091-8027-f430db02b2de-utilities\") pod \"community-operators-frn77\" (UID: \"3b78bc30-b79c-4091-8027-f430db02b2de\") " pod="openshift-marketplace/community-operators-frn77"
Nov 21 13:50:12 crc kubenswrapper[4721]: I1121 13:50:12.498624 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b78bc30-b79c-4091-8027-f430db02b2de-catalog-content\") pod \"community-operators-frn77\" (UID: \"3b78bc30-b79c-4091-8027-f430db02b2de\") " pod="openshift-marketplace/community-operators-frn77"
Nov 21 13:50:12 crc kubenswrapper[4721]: I1121 13:50:12.498728 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvs4m\" (UniqueName: \"kubernetes.io/projected/3b78bc30-b79c-4091-8027-f430db02b2de-kube-api-access-nvs4m\") pod \"community-operators-frn77\" (UID: \"3b78bc30-b79c-4091-8027-f430db02b2de\") " pod="openshift-marketplace/community-operators-frn77"
Nov 21 13:50:12 crc kubenswrapper[4721]: I1121 13:50:12.498775 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b78bc30-b79c-4091-8027-f430db02b2de-utilities\") pod \"community-operators-frn77\" (UID: \"3b78bc30-b79c-4091-8027-f430db02b2de\") " pod="openshift-marketplace/community-operators-frn77"
Nov 21 13:50:12 crc kubenswrapper[4721]: I1121 13:50:12.499218 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b78bc30-b79c-4091-8027-f430db02b2de-catalog-content\") pod \"community-operators-frn77\" (UID: \"3b78bc30-b79c-4091-8027-f430db02b2de\") " pod="openshift-marketplace/community-operators-frn77"
Nov 21 13:50:12 crc kubenswrapper[4721]: I1121 13:50:12.499226 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b78bc30-b79c-4091-8027-f430db02b2de-utilities\") pod \"community-operators-frn77\" (UID: \"3b78bc30-b79c-4091-8027-f430db02b2de\") " pod="openshift-marketplace/community-operators-frn77"
Nov 21 13:50:12 crc kubenswrapper[4721]: I1121 13:50:12.527978 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvs4m\" (UniqueName: \"kubernetes.io/projected/3b78bc30-b79c-4091-8027-f430db02b2de-kube-api-access-nvs4m\") pod \"community-operators-frn77\" (UID: \"3b78bc30-b79c-4091-8027-f430db02b2de\") " pod="openshift-marketplace/community-operators-frn77"
Nov 21 13:50:12 crc kubenswrapper[4721]: I1121 13:50:12.638961 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-frn77"
Nov 21 13:50:12 crc kubenswrapper[4721]: I1121 13:50:12.932889 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-frn77"]
Nov 21 13:50:12 crc kubenswrapper[4721]: W1121 13:50:12.956222 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b78bc30_b79c_4091_8027_f430db02b2de.slice/crio-b586804bfe94461eb6aed244998b3428894a1c094f7c350e44be422130df8fc9 WatchSource:0}: Error finding container b586804bfe94461eb6aed244998b3428894a1c094f7c350e44be422130df8fc9: Status 404 returned error can't find the container with id b586804bfe94461eb6aed244998b3428894a1c094f7c350e44be422130df8fc9
Nov 21 13:50:13 crc kubenswrapper[4721]: I1121 13:50:13.902789 4721 generic.go:334] "Generic (PLEG): container finished" podID="3b78bc30-b79c-4091-8027-f430db02b2de" containerID="ced9c4bf60f71a161254f8e3ae29c47e8d5b3745a21998edbe7554447eba6d16" exitCode=0
Nov 21 13:50:13 crc kubenswrapper[4721]: I1121 13:50:13.902850 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-frn77" event={"ID":"3b78bc30-b79c-4091-8027-f430db02b2de","Type":"ContainerDied","Data":"ced9c4bf60f71a161254f8e3ae29c47e8d5b3745a21998edbe7554447eba6d16"}
Nov 21 13:50:13 crc kubenswrapper[4721]: I1121 13:50:13.903264 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-frn77" event={"ID":"3b78bc30-b79c-4091-8027-f430db02b2de","Type":"ContainerStarted","Data":"b586804bfe94461eb6aed244998b3428894a1c094f7c350e44be422130df8fc9"}
Nov 21 13:50:14 crc kubenswrapper[4721]: I1121 13:50:14.702443 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-x6mh9"]
Nov 21 13:50:14 crc kubenswrapper[4721]: I1121 13:50:14.703107 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="northd" containerID="cri-o://9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8" gracePeriod=30
Nov 21 13:50:14 crc kubenswrapper[4721]: I1121 13:50:14.703179 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="sbdb" containerID="cri-o://f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880" gracePeriod=30
Nov 21 13:50:14 crc kubenswrapper[4721]: I1121 13:50:14.703252 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="nbdb" containerID="cri-o://3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb" gracePeriod=30
Nov 21 13:50:14 crc kubenswrapper[4721]: I1121 13:50:14.703467 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="kube-rbac-proxy-node" containerID="cri-o://7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0" gracePeriod=30
Nov 21 13:50:14 crc kubenswrapper[4721]: I1121 13:50:14.703517 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa" gracePeriod=30
Nov 21 13:50:14 crc kubenswrapper[4721]: I1121 13:50:14.703628 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovn-controller" containerID="cri-o://0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9" gracePeriod=30
Nov 21 13:50:14 crc kubenswrapper[4721]: I1121 13:50:14.703631 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovn-acl-logging" containerID="cri-o://f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b" gracePeriod=30
Nov 21 13:50:14 crc kubenswrapper[4721]: I1121 13:50:14.737073 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovnkube-controller" containerID="cri-o://2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980" gracePeriod=30
Nov 21 13:50:14 crc kubenswrapper[4721]: I1121 13:50:14.915539 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovnkube-controller/3.log"
Nov 21 13:50:14 crc kubenswrapper[4721]: I1121 13:50:14.917957 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovn-acl-logging/0.log"
Nov 21 13:50:14 crc kubenswrapper[4721]: I1121 13:50:14.919047 4721 generic.go:334] "Generic (PLEG): container finished" podID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerID="f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b" exitCode=143
Nov 21 13:50:14 crc kubenswrapper[4721]: I1121 13:50:14.919120 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerDied","Data":"f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b"}
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.317662 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880 is running failed: container process not found" containerID="f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"]
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.317688 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb is running failed: container process not found" containerID="3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"]
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.318610 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb is running failed: container process not found" containerID="3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"]
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.318648 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880 is running failed: container process not found" containerID="f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"]
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.319482 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb is running failed: container process not found" containerID="3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"]
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.319532 4721 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb is running failed: container process not found" probeType="Readiness" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="nbdb"
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.319487 4721 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880 is running failed: container process not found" containerID="f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"]
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.319599 4721 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880 is running failed: container process not found" probeType="Readiness" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="sbdb"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.633580 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovnkube-controller/3.log"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.637534 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovn-acl-logging/0.log"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.638651 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovn-controller/0.log"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.639473 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699309 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-pg5jp"]
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.699561 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="sbdb"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699574 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="sbdb"
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.699588 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovn-acl-logging"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699597 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovn-acl-logging"
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.699607 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovnkube-controller"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699615 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovnkube-controller"
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.699622 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovnkube-controller"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699630 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovnkube-controller"
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.699637 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="nbdb"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699644 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="nbdb"
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.699654 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="kube-rbac-proxy-node"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699669 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="kube-rbac-proxy-node"
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.699679 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="kubecfg-setup"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699687 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="kubecfg-setup"
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.699703 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="kube-rbac-proxy-ovn-metrics"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699711 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="kube-rbac-proxy-ovn-metrics"
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.699721 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="northd"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699728 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="northd"
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.699738 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovnkube-controller"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699745 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovnkube-controller"
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.699756 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovn-controller"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699764 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovn-controller"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699884 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovnkube-controller"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699895 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovnkube-controller"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699903 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="kube-rbac-proxy-node"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699916 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovn-controller"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699925 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovnkube-controller"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699938 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovn-acl-logging"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699949 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="sbdb"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699959 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="kube-rbac-proxy-ovn-metrics"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699971 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="northd"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.699982 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="nbdb"
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.700123 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovnkube-controller"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.700133 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovnkube-controller"
Nov 21 13:50:15 crc kubenswrapper[4721]: E1121 13:50:15.700151 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovnkube-controller"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.700159 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovnkube-controller"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.700272 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovnkube-controller"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.700287 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerName="ovnkube-controller"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.702505 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp"
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.743826 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-run-openvswitch\") pod \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") "
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.743861 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-etc-openvswitch\") pod \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") "
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.743882 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-node-log\") pod \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") "
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.743911 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-kubelet\") pod \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") "
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.743947 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-run-systemd\") pod \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") "
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.743968 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "7db08ddd-c14b-40b0-a2fd-67f84d6e747a" (UID: "7db08ddd-c14b-40b0-a2fd-67f84d6e747a"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.743962 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "7db08ddd-c14b-40b0-a2fd-67f84d6e747a" (UID: "7db08ddd-c14b-40b0-a2fd-67f84d6e747a"). InnerVolumeSpecName "run-openvswitch".
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744031 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-env-overrides\") pod \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744054 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-run-netns\") pod \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744043 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-node-log" (OuterVolumeSpecName: "node-log") pod "7db08ddd-c14b-40b0-a2fd-67f84d6e747a" (UID: "7db08ddd-c14b-40b0-a2fd-67f84d6e747a"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744056 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "7db08ddd-c14b-40b0-a2fd-67f84d6e747a" (UID: "7db08ddd-c14b-40b0-a2fd-67f84d6e747a"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744113 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-log-socket" (OuterVolumeSpecName: "log-socket") pod "7db08ddd-c14b-40b0-a2fd-67f84d6e747a" (UID: "7db08ddd-c14b-40b0-a2fd-67f84d6e747a"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744121 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "7db08ddd-c14b-40b0-a2fd-67f84d6e747a" (UID: "7db08ddd-c14b-40b0-a2fd-67f84d6e747a"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744087 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-log-socket\") pod \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744150 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-run-ovn-kubernetes\") pod \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744170 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-run-ovn\") pod \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744190 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-systemd-units\") pod \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744222 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzjfx\" (UniqueName: \"kubernetes.io/projected/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-kube-api-access-xzjfx\") pod \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744254 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-var-lib-openvswitch\") pod \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744252 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "7db08ddd-c14b-40b0-a2fd-67f84d6e747a" (UID: "7db08ddd-c14b-40b0-a2fd-67f84d6e747a"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744293 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "7db08ddd-c14b-40b0-a2fd-67f84d6e747a" (UID: "7db08ddd-c14b-40b0-a2fd-67f84d6e747a"). InnerVolumeSpecName "host-run-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744299 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-ovnkube-config\") pod \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744383 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-ovn-node-metrics-cert\") pod \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744409 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-ovnkube-script-lib\") pod \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744447 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-cni-netd\") pod \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744469 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744499 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-slash\") pod \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744524 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "7db08ddd-c14b-40b0-a2fd-67f84d6e747a" (UID: "7db08ddd-c14b-40b0-a2fd-67f84d6e747a"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744555 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "7db08ddd-c14b-40b0-a2fd-67f84d6e747a" (UID: "7db08ddd-c14b-40b0-a2fd-67f84d6e747a"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744529 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-cni-bin\") pod \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\" (UID: \"7db08ddd-c14b-40b0-a2fd-67f84d6e747a\") " Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744679 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "7db08ddd-c14b-40b0-a2fd-67f84d6e747a" (UID: "7db08ddd-c14b-40b0-a2fd-67f84d6e747a"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744720 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "7db08ddd-c14b-40b0-a2fd-67f84d6e747a" (UID: "7db08ddd-c14b-40b0-a2fd-67f84d6e747a"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744742 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "7db08ddd-c14b-40b0-a2fd-67f84d6e747a" (UID: "7db08ddd-c14b-40b0-a2fd-67f84d6e747a"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744764 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-run-ovn\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744810 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-systemd-units\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744837 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-run-systemd\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744864 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-kubelet\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744908 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgvgv\" (UniqueName: 
\"kubernetes.io/projected/9974814c-932d-4ceb-85f1-4670f7cbdf84-kube-api-access-kgvgv\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744935 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-etc-openvswitch\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745006 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-var-lib-openvswitch\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745027 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-cni-bin\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745086 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9974814c-932d-4ceb-85f1-4670f7cbdf84-ovnkube-script-lib\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745110 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-run-openvswitch\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745140 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-log-socket\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745162 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9974814c-932d-4ceb-85f1-4670f7cbdf84-ovn-node-metrics-cert\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745192 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-run-netns\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745227 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745265 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-run-ovn-kubernetes\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745290 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-slash\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.744767 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "7db08ddd-c14b-40b0-a2fd-67f84d6e747a" (UID: "7db08ddd-c14b-40b0-a2fd-67f84d6e747a"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745269 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "7db08ddd-c14b-40b0-a2fd-67f84d6e747a" (UID: "7db08ddd-c14b-40b0-a2fd-67f84d6e747a"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745323 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "7db08ddd-c14b-40b0-a2fd-67f84d6e747a" (UID: "7db08ddd-c14b-40b0-a2fd-67f84d6e747a"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745345 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-slash" (OuterVolumeSpecName: "host-slash") pod "7db08ddd-c14b-40b0-a2fd-67f84d6e747a" (UID: "7db08ddd-c14b-40b0-a2fd-67f84d6e747a"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745362 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9974814c-932d-4ceb-85f1-4670f7cbdf84-env-overrides\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745517 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-node-log\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745630 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-cni-netd\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745665 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9974814c-932d-4ceb-85f1-4670f7cbdf84-ovnkube-config\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745810 4721 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-run-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745827 4721 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745839 4721 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-node-log\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745854 4721 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-kubelet\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745872 4721 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745884 4721 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-run-netns\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745896 4721 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-log-socket\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 
13:50:15.745909 4721 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745921 4721 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745933 4721 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-systemd-units\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745962 4721 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745974 4721 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745985 4721 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.745999 4721 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-cni-netd\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.746012 4721 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.746025 4721 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-slash\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.746039 4721 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-host-cni-bin\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.750214 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-kube-api-access-xzjfx" (OuterVolumeSpecName: "kube-api-access-xzjfx") pod "7db08ddd-c14b-40b0-a2fd-67f84d6e747a" (UID: "7db08ddd-c14b-40b0-a2fd-67f84d6e747a"). InnerVolumeSpecName "kube-api-access-xzjfx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.752790 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "7db08ddd-c14b-40b0-a2fd-67f84d6e747a" (UID: "7db08ddd-c14b-40b0-a2fd-67f84d6e747a"). 
InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.761549 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "7db08ddd-c14b-40b0-a2fd-67f84d6e747a" (UID: "7db08ddd-c14b-40b0-a2fd-67f84d6e747a"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.846949 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9974814c-932d-4ceb-85f1-4670f7cbdf84-env-overrides\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847016 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-node-log\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847040 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-cni-netd\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847058 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9974814c-932d-4ceb-85f1-4670f7cbdf84-ovnkube-config\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847107 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-run-ovn\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847138 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-systemd-units\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847148 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-cni-netd\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847159 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-run-systemd\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" 
Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847189 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-run-systemd\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847208 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-kubelet\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847225 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-systemd-units\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847246 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgvgv\" (UniqueName: \"kubernetes.io/projected/9974814c-932d-4ceb-85f1-4670f7cbdf84-kube-api-access-kgvgv\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847257 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-kubelet\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847275 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-etc-openvswitch\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847282 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-node-log\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847342 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-var-lib-openvswitch\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847309 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-run-ovn\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847346 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" 
(UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-etc-openvswitch\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847317 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-var-lib-openvswitch\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847425 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-cni-bin\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847462 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9974814c-932d-4ceb-85f1-4670f7cbdf84-ovnkube-script-lib\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847488 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-run-openvswitch\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847492 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-cni-bin\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847510 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-log-socket\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847531 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-run-openvswitch\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847533 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9974814c-932d-4ceb-85f1-4670f7cbdf84-ovn-node-metrics-cert\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847560 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-log-socket\") pod 
\"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847578 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-run-netns\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847606 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847620 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9974814c-932d-4ceb-85f1-4670f7cbdf84-env-overrides\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847624 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-run-ovn-kubernetes\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847648 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-run-ovn-kubernetes\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847663 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-slash\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847675 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847720 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-slash\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847752 4721 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-ovn-node-metrics-cert\") on node \"crc\" 
DevicePath \"\"" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847771 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9974814c-932d-4ceb-85f1-4670f7cbdf84-host-run-netns\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847819 4721 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-run-systemd\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847837 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzjfx\" (UniqueName: \"kubernetes.io/projected/7db08ddd-c14b-40b0-a2fd-67f84d6e747a-kube-api-access-xzjfx\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.847841 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9974814c-932d-4ceb-85f1-4670f7cbdf84-ovnkube-config\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.848156 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9974814c-932d-4ceb-85f1-4670f7cbdf84-ovnkube-script-lib\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.851464 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9974814c-932d-4ceb-85f1-4670f7cbdf84-ovn-node-metrics-cert\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.863251 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgvgv\" (UniqueName: \"kubernetes.io/projected/9974814c-932d-4ceb-85f1-4670f7cbdf84-kube-api-access-kgvgv\") pod \"ovnkube-node-pg5jp\" (UID: \"9974814c-932d-4ceb-85f1-4670f7cbdf84\") " pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.928754 4721 generic.go:334] "Generic (PLEG): container finished" podID="3b78bc30-b79c-4091-8027-f430db02b2de" containerID="0df7b2ff61b6377a3c5f6aaf7d0c37d29655948f7e23dff9607747ea77fa6668" exitCode=0 Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.928851 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-frn77" event={"ID":"3b78bc30-b79c-4091-8027-f430db02b2de","Type":"ContainerDied","Data":"0df7b2ff61b6377a3c5f6aaf7d0c37d29655948f7e23dff9607747ea77fa6668"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.931321 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hp6ft_15670f4b-cd59-4040-b6d5-87a7cf51d63f/kube-multus/2.log" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.931859 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hp6ft_15670f4b-cd59-4040-b6d5-87a7cf51d63f/kube-multus/1.log" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.931911 4721 
generic.go:334] "Generic (PLEG): container finished" podID="15670f4b-cd59-4040-b6d5-87a7cf51d63f" containerID="7b044bf3d42e21f5a67f6c3b085ed6017d26c6a42d604dbf526eeb6573630466" exitCode=2 Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.932004 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hp6ft" event={"ID":"15670f4b-cd59-4040-b6d5-87a7cf51d63f","Type":"ContainerDied","Data":"7b044bf3d42e21f5a67f6c3b085ed6017d26c6a42d604dbf526eeb6573630466"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.932092 4721 scope.go:117] "RemoveContainer" containerID="03dde5164495d54b9fddd3dbe88c4af8074e67b53b0eb90d645625879caa39f8" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.932910 4721 scope.go:117] "RemoveContainer" containerID="7b044bf3d42e21f5a67f6c3b085ed6017d26c6a42d604dbf526eeb6573630466" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.951451 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovnkube-controller/3.log" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.958922 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovn-acl-logging/0.log" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.959876 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x6mh9_7db08ddd-c14b-40b0-a2fd-67f84d6e747a/ovn-controller/0.log" Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960575 4721 generic.go:334] "Generic (PLEG): container finished" podID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerID="2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980" exitCode=0 Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960622 4721 generic.go:334] "Generic (PLEG): container finished" podID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerID="f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880" exitCode=0 Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960648 4721 generic.go:334] "Generic (PLEG): container finished" podID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerID="3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb" exitCode=0 Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960645 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerDied","Data":"2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960705 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerDied","Data":"f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960742 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerDied","Data":"3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960656 4721 generic.go:334] "Generic (PLEG): container finished" podID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerID="9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8" exitCode=0 Nov 21 13:50:15 crc 
kubenswrapper[4721]: I1121 13:50:15.960757 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerDied","Data":"9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960766 4721 generic.go:334] "Generic (PLEG): container finished" podID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerID="63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa" exitCode=0 Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960770 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerDied","Data":"63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960778 4721 generic.go:334] "Generic (PLEG): container finished" podID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerID="7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0" exitCode=0 Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960786 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerDied","Data":"7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960810 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960825 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960831 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960837 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960842 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960849 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960875 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960787 4721 generic.go:334] "Generic (PLEG): container finished" podID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" containerID="0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9" exitCode=143 Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960883 4721 pod_container_deletor.go:114] "Failed to 
issue the request to remove container" containerID={"Type":"cri-o","ID":"f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960920 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960934 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960956 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerDied","Data":"0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960988 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.960995 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.961001 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.961006 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.961013 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.961018 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.961024 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.961029 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.961034 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.961043 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.961052 4721 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" event={"ID":"7db08ddd-c14b-40b0-a2fd-67f84d6e747a","Type":"ContainerDied","Data":"1110b51553b6e2b69ff957ee8abe0b4faf00d0a2773d94e7f70f91395844be9b"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.961072 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.961091 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.961097 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.961103 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.961108 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.961114 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.961120 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.961127 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.961132 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.961138 4721 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c"} Nov 21 13:50:15 crc kubenswrapper[4721]: I1121 13:50:15.961033 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-x6mh9" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.006032 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-x6mh9"] Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.006107 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-x6mh9"] Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.016856 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" Nov 21 13:50:16 crc kubenswrapper[4721]: W1121 13:50:16.036712 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9974814c_932d_4ceb_85f1_4670f7cbdf84.slice/crio-7f274c3198950661753f4f281c2ba69644063c8b8849a4d434c7ed753f657273 WatchSource:0}: Error finding container 7f274c3198950661753f4f281c2ba69644063c8b8849a4d434c7ed753f657273: Status 404 returned error can't find the container with id 7f274c3198950661753f4f281c2ba69644063c8b8849a4d434c7ed753f657273 Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.051145 4721 scope.go:117] "RemoveContainer" containerID="2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.075597 4721 scope.go:117] "RemoveContainer" containerID="9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.119830 4721 scope.go:117] "RemoveContainer" containerID="f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.134653 4721 scope.go:117] "RemoveContainer" containerID="3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.149476 4721 scope.go:117] "RemoveContainer" containerID="9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.164170 4721 scope.go:117] "RemoveContainer" containerID="63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.179732 4721 scope.go:117] "RemoveContainer" containerID="7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.192945 4721 scope.go:117] "RemoveContainer" containerID="f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.206210 4721 scope.go:117] "RemoveContainer" containerID="0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.220908 4721 scope.go:117] "RemoveContainer" containerID="e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.238216 4721 scope.go:117] "RemoveContainer" containerID="2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980" Nov 21 13:50:16 crc kubenswrapper[4721]: E1121 13:50:16.238815 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980\": container with ID starting with 2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980 not found: ID does not exist" containerID="2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.238880 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980"} err="failed to get container status \"2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980\": rpc error: code = NotFound desc = could not find container \"2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980\": container with ID starting with 
2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.238911 4721 scope.go:117] "RemoveContainer" containerID="9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2" Nov 21 13:50:16 crc kubenswrapper[4721]: E1121 13:50:16.239238 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2\": container with ID starting with 9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2 not found: ID does not exist" containerID="9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.239262 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2"} err="failed to get container status \"9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2\": rpc error: code = NotFound desc = could not find container \"9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2\": container with ID starting with 9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.239278 4721 scope.go:117] "RemoveContainer" containerID="f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880" Nov 21 13:50:16 crc kubenswrapper[4721]: E1121 13:50:16.239531 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\": container with ID starting with f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880 not found: ID does not exist" containerID="f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.239593 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880"} err="failed to get container status \"f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\": rpc error: code = NotFound desc = could not find container \"f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\": container with ID starting with f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.239612 4721 scope.go:117] "RemoveContainer" containerID="3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb" Nov 21 13:50:16 crc kubenswrapper[4721]: E1121 13:50:16.239872 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\": container with ID starting with 3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb not found: ID does not exist" containerID="3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.239903 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb"} err="failed to get container status \"3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\": rpc 
error: code = NotFound desc = could not find container \"3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\": container with ID starting with 3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.239922 4721 scope.go:117] "RemoveContainer" containerID="9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8" Nov 21 13:50:16 crc kubenswrapper[4721]: E1121 13:50:16.240421 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\": container with ID starting with 9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8 not found: ID does not exist" containerID="9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.240455 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8"} err="failed to get container status \"9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\": rpc error: code = NotFound desc = could not find container \"9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\": container with ID starting with 9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.240474 4721 scope.go:117] "RemoveContainer" containerID="63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa" Nov 21 13:50:16 crc kubenswrapper[4721]: E1121 13:50:16.240837 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\": container with ID starting with 63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa not found: ID does not exist" containerID="63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.240898 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa"} err="failed to get container status \"63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\": rpc error: code = NotFound desc = could not find container \"63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\": container with ID starting with 63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.240936 4721 scope.go:117] "RemoveContainer" containerID="7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0" Nov 21 13:50:16 crc kubenswrapper[4721]: E1121 13:50:16.241262 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\": container with ID starting with 7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0 not found: ID does not exist" containerID="7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.241298 4721 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0"} err="failed to get container status \"7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\": rpc error: code = NotFound desc = could not find container \"7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\": container with ID starting with 7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.241320 4721 scope.go:117] "RemoveContainer" containerID="f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b" Nov 21 13:50:16 crc kubenswrapper[4721]: E1121 13:50:16.241729 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\": container with ID starting with f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b not found: ID does not exist" containerID="f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.241818 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b"} err="failed to get container status \"f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\": rpc error: code = NotFound desc = could not find container \"f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\": container with ID starting with f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.241851 4721 scope.go:117] "RemoveContainer" containerID="0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9" Nov 21 13:50:16 crc kubenswrapper[4721]: E1121 13:50:16.242241 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\": container with ID starting with 0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9 not found: ID does not exist" containerID="0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.242283 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9"} err="failed to get container status \"0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\": rpc error: code = NotFound desc = could not find container \"0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\": container with ID starting with 0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.242312 4721 scope.go:117] "RemoveContainer" containerID="e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c" Nov 21 13:50:16 crc kubenswrapper[4721]: E1121 13:50:16.242606 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\": container with ID starting with e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c not found: ID does not exist" 
containerID="e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.242642 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c"} err="failed to get container status \"e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\": rpc error: code = NotFound desc = could not find container \"e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\": container with ID starting with e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.242666 4721 scope.go:117] "RemoveContainer" containerID="2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.242955 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980"} err="failed to get container status \"2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980\": rpc error: code = NotFound desc = could not find container \"2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980\": container with ID starting with 2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.242986 4721 scope.go:117] "RemoveContainer" containerID="9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.243290 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2"} err="failed to get container status \"9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2\": rpc error: code = NotFound desc = could not find container \"9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2\": container with ID starting with 9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.243327 4721 scope.go:117] "RemoveContainer" containerID="f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.243630 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880"} err="failed to get container status \"f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\": rpc error: code = NotFound desc = could not find container \"f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\": container with ID starting with f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.243664 4721 scope.go:117] "RemoveContainer" containerID="3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.243962 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb"} err="failed to get container status \"3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\": rpc error: code = NotFound desc = could not find 
container \"3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\": container with ID starting with 3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.243996 4721 scope.go:117] "RemoveContainer" containerID="9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.244340 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8"} err="failed to get container status \"9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\": rpc error: code = NotFound desc = could not find container \"9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\": container with ID starting with 9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.244369 4721 scope.go:117] "RemoveContainer" containerID="63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.244632 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa"} err="failed to get container status \"63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\": rpc error: code = NotFound desc = could not find container \"63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\": container with ID starting with 63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.244668 4721 scope.go:117] "RemoveContainer" containerID="7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.244896 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0"} err="failed to get container status \"7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\": rpc error: code = NotFound desc = could not find container \"7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\": container with ID starting with 7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.244958 4721 scope.go:117] "RemoveContainer" containerID="f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.245328 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b"} err="failed to get container status \"f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\": rpc error: code = NotFound desc = could not find container \"f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\": container with ID starting with f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.245365 4721 scope.go:117] "RemoveContainer" containerID="0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.245664 4721 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9"} err="failed to get container status \"0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\": rpc error: code = NotFound desc = could not find container \"0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\": container with ID starting with 0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.245692 4721 scope.go:117] "RemoveContainer" containerID="e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.245968 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c"} err="failed to get container status \"e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\": rpc error: code = NotFound desc = could not find container \"e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\": container with ID starting with e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.246003 4721 scope.go:117] "RemoveContainer" containerID="2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.246283 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980"} err="failed to get container status \"2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980\": rpc error: code = NotFound desc = could not find container \"2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980\": container with ID starting with 2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.246316 4721 scope.go:117] "RemoveContainer" containerID="9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.246537 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2"} err="failed to get container status \"9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2\": rpc error: code = NotFound desc = could not find container \"9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2\": container with ID starting with 9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.246561 4721 scope.go:117] "RemoveContainer" containerID="f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.246821 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880"} err="failed to get container status \"f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\": rpc error: code = NotFound desc = could not find container \"f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\": container with ID starting with 
f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.246852 4721 scope.go:117] "RemoveContainer" containerID="3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.247238 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb"} err="failed to get container status \"3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\": rpc error: code = NotFound desc = could not find container \"3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\": container with ID starting with 3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.247264 4721 scope.go:117] "RemoveContainer" containerID="9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.247496 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8"} err="failed to get container status \"9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\": rpc error: code = NotFound desc = could not find container \"9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\": container with ID starting with 9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.247522 4721 scope.go:117] "RemoveContainer" containerID="63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.247738 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa"} err="failed to get container status \"63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\": rpc error: code = NotFound desc = could not find container \"63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\": container with ID starting with 63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.247762 4721 scope.go:117] "RemoveContainer" containerID="7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.247960 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0"} err="failed to get container status \"7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\": rpc error: code = NotFound desc = could not find container \"7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\": container with ID starting with 7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.247984 4721 scope.go:117] "RemoveContainer" containerID="f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.248207 4721 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b"} err="failed to get container status \"f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\": rpc error: code = NotFound desc = could not find container \"f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\": container with ID starting with f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.248230 4721 scope.go:117] "RemoveContainer" containerID="0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.248440 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9"} err="failed to get container status \"0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\": rpc error: code = NotFound desc = could not find container \"0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\": container with ID starting with 0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.248468 4721 scope.go:117] "RemoveContainer" containerID="e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.248764 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c"} err="failed to get container status \"e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\": rpc error: code = NotFound desc = could not find container \"e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\": container with ID starting with e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.248801 4721 scope.go:117] "RemoveContainer" containerID="2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.249109 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980"} err="failed to get container status \"2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980\": rpc error: code = NotFound desc = could not find container \"2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980\": container with ID starting with 2874516a6d8365ad0eda269cd60d32963e71f4d408a3fb864c30976e05659980 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.249139 4721 scope.go:117] "RemoveContainer" containerID="9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.249422 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2"} err="failed to get container status \"9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2\": rpc error: code = NotFound desc = could not find container \"9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2\": container with ID starting with 9ee2d18b96001914fa1b8d3d9702344f13d8f405c8dfad442059010c135826f2 not found: ID does not exist" Nov 
21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.249451 4721 scope.go:117] "RemoveContainer" containerID="f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.249702 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880"} err="failed to get container status \"f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\": rpc error: code = NotFound desc = could not find container \"f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880\": container with ID starting with f561353f21a6851683187a4ea0b8f79b9c3817ab180089c243806f9493d63880 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.249729 4721 scope.go:117] "RemoveContainer" containerID="3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.250083 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb"} err="failed to get container status \"3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\": rpc error: code = NotFound desc = could not find container \"3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb\": container with ID starting with 3e15ac3fcf8acb69d9c9a74626d50fd32ad4337fe539df16946455b1c06411fb not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.250121 4721 scope.go:117] "RemoveContainer" containerID="9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.250436 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8"} err="failed to get container status \"9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\": rpc error: code = NotFound desc = could not find container \"9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8\": container with ID starting with 9813c2b7e8fd8b4de31aa9968905fafe28c03989771ac0df3ac17c9b996c44c8 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.250468 4721 scope.go:117] "RemoveContainer" containerID="63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.250758 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa"} err="failed to get container status \"63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\": rpc error: code = NotFound desc = could not find container \"63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa\": container with ID starting with 63625ff1d85e8a418145e5e42e99f47fcd81190744e1454ce722fac63366c9fa not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.250789 4721 scope.go:117] "RemoveContainer" containerID="7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.251040 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0"} err="failed to get container status 
\"7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\": rpc error: code = NotFound desc = could not find container \"7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0\": container with ID starting with 7b89b1cb80bb8824f8a6b1850f503cd143d5baf2dec12e0a9260b5dc9ab66ec0 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.251092 4721 scope.go:117] "RemoveContainer" containerID="f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.251353 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b"} err="failed to get container status \"f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\": rpc error: code = NotFound desc = could not find container \"f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b\": container with ID starting with f1bdf6b2e33d1e1b801b0620c076e92673dca266842964bd4965cf4849ac9f5b not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.251382 4721 scope.go:117] "RemoveContainer" containerID="0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.251655 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9"} err="failed to get container status \"0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\": rpc error: code = NotFound desc = could not find container \"0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9\": container with ID starting with 0f832ab0662971734662d37becf133b73f0aafdf387e0d551d3bcdb4b4da27c9 not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.251685 4721 scope.go:117] "RemoveContainer" containerID="e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.251929 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c"} err="failed to get container status \"e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\": rpc error: code = NotFound desc = could not find container \"e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c\": container with ID starting with e99fcceab44c7319571af0eaaddf7dae31030bc2c826702be243bfb8dec5397c not found: ID does not exist" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.972603 4721 generic.go:334] "Generic (PLEG): container finished" podID="9974814c-932d-4ceb-85f1-4670f7cbdf84" containerID="842b0e07b985f1b6b9eb5081af87180fdceaa336e0facf240994cda1dd82ba8e" exitCode=0 Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.972731 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" event={"ID":"9974814c-932d-4ceb-85f1-4670f7cbdf84","Type":"ContainerDied","Data":"842b0e07b985f1b6b9eb5081af87180fdceaa336e0facf240994cda1dd82ba8e"} Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.973191 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" event={"ID":"9974814c-932d-4ceb-85f1-4670f7cbdf84","Type":"ContainerStarted","Data":"7f274c3198950661753f4f281c2ba69644063c8b8849a4d434c7ed753f657273"} Nov 21 13:50:16 crc 
kubenswrapper[4721]: I1121 13:50:16.992095 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hp6ft_15670f4b-cd59-4040-b6d5-87a7cf51d63f/kube-multus/2.log" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.993417 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7db08ddd-c14b-40b0-a2fd-67f84d6e747a" path="/var/lib/kubelet/pods/7db08ddd-c14b-40b0-a2fd-67f84d6e747a/volumes" Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.995603 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-frn77" event={"ID":"3b78bc30-b79c-4091-8027-f430db02b2de","Type":"ContainerStarted","Data":"04e8b329ecacfd16f2290b3ff5ea5faac7bfa0f8ba81356aa90b00ab26759f92"} Nov 21 13:50:16 crc kubenswrapper[4721]: I1121 13:50:16.995665 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hp6ft" event={"ID":"15670f4b-cd59-4040-b6d5-87a7cf51d63f","Type":"ContainerStarted","Data":"f34ff114b0d41f349731bac66a3641d9c892883718a0942b85fd1be8bcd1c15b"} Nov 21 13:50:17 crc kubenswrapper[4721]: I1121 13:50:17.046140 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-frn77" podStartSLOduration=2.1891315000000002 podStartE2EDuration="5.04612265s" podCreationTimestamp="2025-11-21 13:50:12 +0000 UTC" firstStartedPulling="2025-11-21 13:50:13.905702712 +0000 UTC m=+851.883593970" lastFinishedPulling="2025-11-21 13:50:16.762693862 +0000 UTC m=+854.740585120" observedRunningTime="2025-11-21 13:50:17.043909332 +0000 UTC m=+855.021800600" watchObservedRunningTime="2025-11-21 13:50:17.04612265 +0000 UTC m=+855.024013918" Nov 21 13:50:18 crc kubenswrapper[4721]: I1121 13:50:18.004824 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" event={"ID":"9974814c-932d-4ceb-85f1-4670f7cbdf84","Type":"ContainerStarted","Data":"b277dede9853f050620166e7bf7fcbf7ff9599cfbfab80b90908e03f730a1e51"} Nov 21 13:50:18 crc kubenswrapper[4721]: I1121 13:50:18.005235 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" event={"ID":"9974814c-932d-4ceb-85f1-4670f7cbdf84","Type":"ContainerStarted","Data":"8aa186c528fabe85895dc4979d54ec0d7609b35c40237b61ae1e62fe7423df60"} Nov 21 13:50:18 crc kubenswrapper[4721]: I1121 13:50:18.005252 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" event={"ID":"9974814c-932d-4ceb-85f1-4670f7cbdf84","Type":"ContainerStarted","Data":"36e4a9597b09966a883d3dc0da490c7c50b3e8d0c9400c7a11aff30f6ed91ee1"} Nov 21 13:50:18 crc kubenswrapper[4721]: I1121 13:50:18.005263 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" event={"ID":"9974814c-932d-4ceb-85f1-4670f7cbdf84","Type":"ContainerStarted","Data":"eb95e63c2719c432405af5427ae22720d3586a484c7adc2af498c50dc8ee0ca7"} Nov 21 13:50:18 crc kubenswrapper[4721]: I1121 13:50:18.005274 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" event={"ID":"9974814c-932d-4ceb-85f1-4670f7cbdf84","Type":"ContainerStarted","Data":"829ce0a501597a3ac910056879e6aba7aa092e7fa3afaab64b3104113f1f3726"} Nov 21 13:50:18 crc kubenswrapper[4721]: I1121 13:50:18.005285 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" 
event={"ID":"9974814c-932d-4ceb-85f1-4670f7cbdf84","Type":"ContainerStarted","Data":"9d1592e634be195907ce0e6f5b2d9c8a560930111d404108a7a98af42dd31faa"} Nov 21 13:50:19 crc kubenswrapper[4721]: I1121 13:50:19.493514 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-s6g8d" Nov 21 13:50:19 crc kubenswrapper[4721]: I1121 13:50:19.539352 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-s6g8d" Nov 21 13:50:19 crc kubenswrapper[4721]: I1121 13:50:19.724459 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s6g8d"] Nov 21 13:50:21 crc kubenswrapper[4721]: I1121 13:50:21.032028 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" event={"ID":"9974814c-932d-4ceb-85f1-4670f7cbdf84","Type":"ContainerStarted","Data":"c7bdcc533410f324c813181f2289fb5d58ea5a803bfb66b1e879e6b4ae1da040"} Nov 21 13:50:21 crc kubenswrapper[4721]: I1121 13:50:21.032286 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-s6g8d" podUID="dc27d915-7f27-4433-a93f-54115b72bc0b" containerName="registry-server" containerID="cri-o://4e993c585f53aedc0589997753d49efb946b7f694a97addc177451a8027590e1" gracePeriod=2 Nov 21 13:50:22 crc kubenswrapper[4721]: I1121 13:50:22.039796 4721 generic.go:334] "Generic (PLEG): container finished" podID="dc27d915-7f27-4433-a93f-54115b72bc0b" containerID="4e993c585f53aedc0589997753d49efb946b7f694a97addc177451a8027590e1" exitCode=0 Nov 21 13:50:22 crc kubenswrapper[4721]: I1121 13:50:22.040051 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s6g8d" event={"ID":"dc27d915-7f27-4433-a93f-54115b72bc0b","Type":"ContainerDied","Data":"4e993c585f53aedc0589997753d49efb946b7f694a97addc177451a8027590e1"} Nov 21 13:50:22 crc kubenswrapper[4721]: I1121 13:50:22.209033 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-s6g8d" Nov 21 13:50:22 crc kubenswrapper[4721]: I1121 13:50:22.362527 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9pgm\" (UniqueName: \"kubernetes.io/projected/dc27d915-7f27-4433-a93f-54115b72bc0b-kube-api-access-h9pgm\") pod \"dc27d915-7f27-4433-a93f-54115b72bc0b\" (UID: \"dc27d915-7f27-4433-a93f-54115b72bc0b\") " Nov 21 13:50:22 crc kubenswrapper[4721]: I1121 13:50:22.362709 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc27d915-7f27-4433-a93f-54115b72bc0b-catalog-content\") pod \"dc27d915-7f27-4433-a93f-54115b72bc0b\" (UID: \"dc27d915-7f27-4433-a93f-54115b72bc0b\") " Nov 21 13:50:22 crc kubenswrapper[4721]: I1121 13:50:22.362769 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc27d915-7f27-4433-a93f-54115b72bc0b-utilities\") pod \"dc27d915-7f27-4433-a93f-54115b72bc0b\" (UID: \"dc27d915-7f27-4433-a93f-54115b72bc0b\") " Nov 21 13:50:22 crc kubenswrapper[4721]: I1121 13:50:22.364760 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc27d915-7f27-4433-a93f-54115b72bc0b-utilities" (OuterVolumeSpecName: "utilities") pod "dc27d915-7f27-4433-a93f-54115b72bc0b" (UID: "dc27d915-7f27-4433-a93f-54115b72bc0b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:50:22 crc kubenswrapper[4721]: I1121 13:50:22.370007 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc27d915-7f27-4433-a93f-54115b72bc0b-kube-api-access-h9pgm" (OuterVolumeSpecName: "kube-api-access-h9pgm") pod "dc27d915-7f27-4433-a93f-54115b72bc0b" (UID: "dc27d915-7f27-4433-a93f-54115b72bc0b"). InnerVolumeSpecName "kube-api-access-h9pgm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:50:22 crc kubenswrapper[4721]: I1121 13:50:22.464929 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc27d915-7f27-4433-a93f-54115b72bc0b-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:22 crc kubenswrapper[4721]: I1121 13:50:22.464978 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9pgm\" (UniqueName: \"kubernetes.io/projected/dc27d915-7f27-4433-a93f-54115b72bc0b-kube-api-access-h9pgm\") on node \"crc\" DevicePath \"\"" Nov 21 13:50:22 crc kubenswrapper[4721]: I1121 13:50:22.494207 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc27d915-7f27-4433-a93f-54115b72bc0b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dc27d915-7f27-4433-a93f-54115b72bc0b" (UID: "dc27d915-7f27-4433-a93f-54115b72bc0b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 13:50:22 crc kubenswrapper[4721]: I1121 13:50:22.567162 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc27d915-7f27-4433-a93f-54115b72bc0b-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 21 13:50:22 crc kubenswrapper[4721]: I1121 13:50:22.639351 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-frn77"
Nov 21 13:50:22 crc kubenswrapper[4721]: I1121 13:50:22.639735 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-frn77"
Nov 21 13:50:22 crc kubenswrapper[4721]: I1121 13:50:22.687492 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-frn77"
Nov 21 13:50:23 crc kubenswrapper[4721]: I1121 13:50:23.058662 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s6g8d" event={"ID":"dc27d915-7f27-4433-a93f-54115b72bc0b","Type":"ContainerDied","Data":"7651d7f582eb0efc7557ae69e21978c3efa7aa6d9e88b4e9b3d71b1c5979e953"}
Nov 21 13:50:23 crc kubenswrapper[4721]: I1121 13:50:23.058761 4721 scope.go:117] "RemoveContainer" containerID="4e993c585f53aedc0589997753d49efb946b7f694a97addc177451a8027590e1"
Nov 21 13:50:23 crc kubenswrapper[4721]: I1121 13:50:23.058685 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s6g8d"
Nov 21 13:50:23 crc kubenswrapper[4721]: I1121 13:50:23.085989 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s6g8d"]
Nov 21 13:50:23 crc kubenswrapper[4721]: I1121 13:50:23.092500 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-s6g8d"]
Nov 21 13:50:23 crc kubenswrapper[4721]: I1121 13:50:23.096502 4721 scope.go:117] "RemoveContainer" containerID="e7792b799c2a37c230b914685b828ebf00876ddea049796997b739b295ee705a"
Nov 21 13:50:23 crc kubenswrapper[4721]: I1121 13:50:23.136306 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-frn77"
Nov 21 13:50:23 crc kubenswrapper[4721]: I1121 13:50:23.136524 4721 scope.go:117] "RemoveContainer" containerID="f21f877ef7303ed1a29f35e7f6367f1311de6f7720f7bc697c3834b840fd7c43"
Nov 21 13:50:24 crc kubenswrapper[4721]: I1121 13:50:24.920684 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-frn77"]
Nov 21 13:50:24 crc kubenswrapper[4721]: I1121 13:50:24.992360 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc27d915-7f27-4433-a93f-54115b72bc0b" path="/var/lib/kubelet/pods/dc27d915-7f27-4433-a93f-54115b72bc0b/volumes"
Nov 21 13:50:25 crc kubenswrapper[4721]: I1121 13:50:25.081288 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" event={"ID":"9974814c-932d-4ceb-85f1-4670f7cbdf84","Type":"ContainerStarted","Data":"f6c13f982f3991d8048eefd67783e3d632c57d1b9d2b970d8fb926e9d036c429"}
Nov 21 13:50:25 crc kubenswrapper[4721]: I1121 13:50:25.081724 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp"
Nov 21 13:50:25 crc kubenswrapper[4721]: I1121 13:50:25.081957 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp"
Nov 21 13:50:25 crc kubenswrapper[4721]: I1121 13:50:25.082014 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp"
Nov 21 13:50:25 crc kubenswrapper[4721]: I1121 13:50:25.119987 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp"
Nov 21 13:50:25 crc kubenswrapper[4721]: I1121 13:50:25.135432 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp" podStartSLOduration=10.135408245 podStartE2EDuration="10.135408245s" podCreationTimestamp="2025-11-21 13:50:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:50:25.133780213 +0000 UTC m=+863.111671511" watchObservedRunningTime="2025-11-21 13:50:25.135408245 +0000 UTC m=+863.113299523"
Nov 21 13:50:25 crc kubenswrapper[4721]: I1121 13:50:25.135470 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp"
Nov 21 13:50:26 crc kubenswrapper[4721]: I1121 13:50:26.084744 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-frn77" podUID="3b78bc30-b79c-4091-8027-f430db02b2de" containerName="registry-server" containerID="cri-o://04e8b329ecacfd16f2290b3ff5ea5faac7bfa0f8ba81356aa90b00ab26759f92" gracePeriod=2
Nov 21 13:50:26 crc kubenswrapper[4721]: I1121 13:50:26.314944 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-frn77"
Nov 21 13:50:26 crc kubenswrapper[4721]: I1121 13:50:26.419884 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvs4m\" (UniqueName: \"kubernetes.io/projected/3b78bc30-b79c-4091-8027-f430db02b2de-kube-api-access-nvs4m\") pod \"3b78bc30-b79c-4091-8027-f430db02b2de\" (UID: \"3b78bc30-b79c-4091-8027-f430db02b2de\") "
Nov 21 13:50:26 crc kubenswrapper[4721]: I1121 13:50:26.420757 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b78bc30-b79c-4091-8027-f430db02b2de-catalog-content\") pod \"3b78bc30-b79c-4091-8027-f430db02b2de\" (UID: \"3b78bc30-b79c-4091-8027-f430db02b2de\") "
Nov 21 13:50:26 crc kubenswrapper[4721]: I1121 13:50:26.420849 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b78bc30-b79c-4091-8027-f430db02b2de-utilities\") pod \"3b78bc30-b79c-4091-8027-f430db02b2de\" (UID: \"3b78bc30-b79c-4091-8027-f430db02b2de\") "
Nov 21 13:50:26 crc kubenswrapper[4721]: I1121 13:50:26.421576 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b78bc30-b79c-4091-8027-f430db02b2de-utilities" (OuterVolumeSpecName: "utilities") pod "3b78bc30-b79c-4091-8027-f430db02b2de" (UID: "3b78bc30-b79c-4091-8027-f430db02b2de"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 13:50:26 crc kubenswrapper[4721]: I1121 13:50:26.428222 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b78bc30-b79c-4091-8027-f430db02b2de-kube-api-access-nvs4m" (OuterVolumeSpecName: "kube-api-access-nvs4m") pod "3b78bc30-b79c-4091-8027-f430db02b2de" (UID: "3b78bc30-b79c-4091-8027-f430db02b2de"). InnerVolumeSpecName "kube-api-access-nvs4m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 13:50:26 crc kubenswrapper[4721]: I1121 13:50:26.477565 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b78bc30-b79c-4091-8027-f430db02b2de-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3b78bc30-b79c-4091-8027-f430db02b2de" (UID: "3b78bc30-b79c-4091-8027-f430db02b2de"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 13:50:26 crc kubenswrapper[4721]: I1121 13:50:26.522604 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b78bc30-b79c-4091-8027-f430db02b2de-utilities\") on node \"crc\" DevicePath \"\""
Nov 21 13:50:26 crc kubenswrapper[4721]: I1121 13:50:26.522669 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvs4m\" (UniqueName: \"kubernetes.io/projected/3b78bc30-b79c-4091-8027-f430db02b2de-kube-api-access-nvs4m\") on node \"crc\" DevicePath \"\""
Nov 21 13:50:26 crc kubenswrapper[4721]: I1121 13:50:26.522700 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b78bc30-b79c-4091-8027-f430db02b2de-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 21 13:50:27 crc kubenswrapper[4721]: I1121 13:50:27.094293 4721 generic.go:334] "Generic (PLEG): container finished" podID="3b78bc30-b79c-4091-8027-f430db02b2de" containerID="04e8b329ecacfd16f2290b3ff5ea5faac7bfa0f8ba81356aa90b00ab26759f92" exitCode=0
Nov 21 13:50:27 crc kubenswrapper[4721]: I1121 13:50:27.094394 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-frn77"
Nov 21 13:50:27 crc kubenswrapper[4721]: I1121 13:50:27.094412 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-frn77" event={"ID":"3b78bc30-b79c-4091-8027-f430db02b2de","Type":"ContainerDied","Data":"04e8b329ecacfd16f2290b3ff5ea5faac7bfa0f8ba81356aa90b00ab26759f92"}
Nov 21 13:50:27 crc kubenswrapper[4721]: I1121 13:50:27.094486 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-frn77" event={"ID":"3b78bc30-b79c-4091-8027-f430db02b2de","Type":"ContainerDied","Data":"b586804bfe94461eb6aed244998b3428894a1c094f7c350e44be422130df8fc9"}
Nov 21 13:50:27 crc kubenswrapper[4721]: I1121 13:50:27.094519 4721 scope.go:117] "RemoveContainer" containerID="04e8b329ecacfd16f2290b3ff5ea5faac7bfa0f8ba81356aa90b00ab26759f92"
Nov 21 13:50:27 crc kubenswrapper[4721]: I1121 13:50:27.114845 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-frn77"]
Nov 21 13:50:27 crc kubenswrapper[4721]: I1121 13:50:27.121418 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-frn77"]
Nov 21 13:50:27 crc kubenswrapper[4721]: I1121 13:50:27.125874 4721 scope.go:117] "RemoveContainer" containerID="0df7b2ff61b6377a3c5f6aaf7d0c37d29655948f7e23dff9607747ea77fa6668"
Nov 21 13:50:27 crc kubenswrapper[4721]: I1121 13:50:27.147440 4721 scope.go:117] "RemoveContainer" containerID="ced9c4bf60f71a161254f8e3ae29c47e8d5b3745a21998edbe7554447eba6d16"
Nov 21 13:50:27 crc kubenswrapper[4721]: I1121 13:50:27.174712 4721 scope.go:117] "RemoveContainer" containerID="04e8b329ecacfd16f2290b3ff5ea5faac7bfa0f8ba81356aa90b00ab26759f92"
Nov 21 13:50:27 crc kubenswrapper[4721]: E1121 13:50:27.175538 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04e8b329ecacfd16f2290b3ff5ea5faac7bfa0f8ba81356aa90b00ab26759f92\": container with ID starting with 04e8b329ecacfd16f2290b3ff5ea5faac7bfa0f8ba81356aa90b00ab26759f92 not found: ID does not exist" containerID="04e8b329ecacfd16f2290b3ff5ea5faac7bfa0f8ba81356aa90b00ab26759f92"
Nov 21 13:50:27 crc kubenswrapper[4721]: I1121 13:50:27.175589 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04e8b329ecacfd16f2290b3ff5ea5faac7bfa0f8ba81356aa90b00ab26759f92"} err="failed to get container status \"04e8b329ecacfd16f2290b3ff5ea5faac7bfa0f8ba81356aa90b00ab26759f92\": rpc error: code = NotFound desc = could not find container \"04e8b329ecacfd16f2290b3ff5ea5faac7bfa0f8ba81356aa90b00ab26759f92\": container with ID starting with 04e8b329ecacfd16f2290b3ff5ea5faac7bfa0f8ba81356aa90b00ab26759f92 not found: ID does not exist"
Nov 21 13:50:27 crc kubenswrapper[4721]: I1121 13:50:27.175618 4721 scope.go:117] "RemoveContainer" containerID="0df7b2ff61b6377a3c5f6aaf7d0c37d29655948f7e23dff9607747ea77fa6668"
Nov 21 13:50:27 crc kubenswrapper[4721]: E1121 13:50:27.176420 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0df7b2ff61b6377a3c5f6aaf7d0c37d29655948f7e23dff9607747ea77fa6668\": container with ID starting with 0df7b2ff61b6377a3c5f6aaf7d0c37d29655948f7e23dff9607747ea77fa6668 not found: ID does not exist" containerID="0df7b2ff61b6377a3c5f6aaf7d0c37d29655948f7e23dff9607747ea77fa6668"
Nov 21 13:50:27 crc kubenswrapper[4721]: I1121 13:50:27.176486 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0df7b2ff61b6377a3c5f6aaf7d0c37d29655948f7e23dff9607747ea77fa6668"} err="failed to get container status \"0df7b2ff61b6377a3c5f6aaf7d0c37d29655948f7e23dff9607747ea77fa6668\": rpc error: code = NotFound desc = could not find container \"0df7b2ff61b6377a3c5f6aaf7d0c37d29655948f7e23dff9607747ea77fa6668\": container with ID starting with 0df7b2ff61b6377a3c5f6aaf7d0c37d29655948f7e23dff9607747ea77fa6668 not found: ID does not exist"
Nov 21 13:50:27 crc kubenswrapper[4721]: I1121 13:50:27.176526 4721 scope.go:117] "RemoveContainer" containerID="ced9c4bf60f71a161254f8e3ae29c47e8d5b3745a21998edbe7554447eba6d16"
Nov 21 13:50:27 crc kubenswrapper[4721]: E1121 13:50:27.176926 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ced9c4bf60f71a161254f8e3ae29c47e8d5b3745a21998edbe7554447eba6d16\": container with ID starting with ced9c4bf60f71a161254f8e3ae29c47e8d5b3745a21998edbe7554447eba6d16 not found: ID does not exist" containerID="ced9c4bf60f71a161254f8e3ae29c47e8d5b3745a21998edbe7554447eba6d16"
Nov 21 13:50:27 crc kubenswrapper[4721]: I1121 13:50:27.176953 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ced9c4bf60f71a161254f8e3ae29c47e8d5b3745a21998edbe7554447eba6d16"} err="failed to get container status \"ced9c4bf60f71a161254f8e3ae29c47e8d5b3745a21998edbe7554447eba6d16\": rpc error: code = NotFound desc = could not find container \"ced9c4bf60f71a161254f8e3ae29c47e8d5b3745a21998edbe7554447eba6d16\": container with ID starting with ced9c4bf60f71a161254f8e3ae29c47e8d5b3745a21998edbe7554447eba6d16 not found: ID does not exist"
Nov 21 13:50:28 crc kubenswrapper[4721]: I1121 13:50:28.994760 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b78bc30-b79c-4091-8027-f430db02b2de" path="/var/lib/kubelet/pods/3b78bc30-b79c-4091-8027-f430db02b2de/volumes"
Nov 21 13:50:42 crc kubenswrapper[4721]: I1121 13:50:42.421879 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5"]
Nov 21 13:50:42 crc kubenswrapper[4721]: E1121 13:50:42.422755 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b78bc30-b79c-4091-8027-f430db02b2de" containerName="extract-utilities"
Nov 21 13:50:42 crc kubenswrapper[4721]: I1121 13:50:42.422776 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b78bc30-b79c-4091-8027-f430db02b2de" containerName="extract-utilities"
Nov 21 13:50:42 crc kubenswrapper[4721]: E1121 13:50:42.422797 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc27d915-7f27-4433-a93f-54115b72bc0b" containerName="registry-server"
Nov 21 13:50:42 crc kubenswrapper[4721]: I1121 13:50:42.422809 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc27d915-7f27-4433-a93f-54115b72bc0b" containerName="registry-server"
Nov 21 13:50:42 crc kubenswrapper[4721]: E1121 13:50:42.422829 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b78bc30-b79c-4091-8027-f430db02b2de" containerName="extract-content"
Nov 21 13:50:42 crc kubenswrapper[4721]: I1121 13:50:42.422842 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b78bc30-b79c-4091-8027-f430db02b2de" containerName="extract-content"
Nov 21 13:50:42 crc kubenswrapper[4721]: E1121 13:50:42.422869 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc27d915-7f27-4433-a93f-54115b72bc0b" containerName="extract-utilities"
Nov 21 13:50:42 crc kubenswrapper[4721]: I1121 13:50:42.422882 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc27d915-7f27-4433-a93f-54115b72bc0b" containerName="extract-utilities"
Nov 21 13:50:42 crc kubenswrapper[4721]: E1121 13:50:42.422900 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc27d915-7f27-4433-a93f-54115b72bc0b" containerName="extract-content"
Nov 21 13:50:42 crc kubenswrapper[4721]: I1121 13:50:42.422912 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc27d915-7f27-4433-a93f-54115b72bc0b" containerName="extract-content"
Nov 21 13:50:42 crc kubenswrapper[4721]: E1121 13:50:42.422928 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b78bc30-b79c-4091-8027-f430db02b2de" containerName="registry-server"
Nov 21 13:50:42 crc kubenswrapper[4721]: I1121 13:50:42.422939 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b78bc30-b79c-4091-8027-f430db02b2de" containerName="registry-server"
Nov 21 13:50:42 crc kubenswrapper[4721]: I1121 13:50:42.423127 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc27d915-7f27-4433-a93f-54115b72bc0b" containerName="registry-server"
Nov 21 13:50:42 crc kubenswrapper[4721]: I1121 13:50:42.423154 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b78bc30-b79c-4091-8027-f430db02b2de" containerName="registry-server"
Nov 21 13:50:42 crc kubenswrapper[4721]: I1121 13:50:42.424369 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5"
Nov 21 13:50:42 crc kubenswrapper[4721]: I1121 13:50:42.427638 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Nov 21 13:50:42 crc kubenswrapper[4721]: I1121 13:50:42.437716 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5"]
Nov 21 13:50:42 crc kubenswrapper[4721]: I1121 13:50:42.532499 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/64eec2c9-7144-45ac-b7aa-b0fe255f45b4-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5\" (UID: \"64eec2c9-7144-45ac-b7aa-b0fe255f45b4\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5"
Nov 21 13:50:42 crc kubenswrapper[4721]: I1121 13:50:42.533335 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-889z8\" (UniqueName: \"kubernetes.io/projected/64eec2c9-7144-45ac-b7aa-b0fe255f45b4-kube-api-access-889z8\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5\" (UID: \"64eec2c9-7144-45ac-b7aa-b0fe255f45b4\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5"
Nov 21 13:50:42 crc kubenswrapper[4721]: I1121 13:50:42.533545 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/64eec2c9-7144-45ac-b7aa-b0fe255f45b4-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5\" (UID: \"64eec2c9-7144-45ac-b7aa-b0fe255f45b4\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5"
Nov 21 13:50:42 crc kubenswrapper[4721]: I1121 13:50:42.635219 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/64eec2c9-7144-45ac-b7aa-b0fe255f45b4-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5\" (UID: \"64eec2c9-7144-45ac-b7aa-b0fe255f45b4\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5"
Nov 21 13:50:42 crc kubenswrapper[4721]: I1121 13:50:42.635322 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/64eec2c9-7144-45ac-b7aa-b0fe255f45b4-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5\" (UID: \"64eec2c9-7144-45ac-b7aa-b0fe255f45b4\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5"
Nov 21 13:50:42 crc kubenswrapper[4721]: I1121 13:50:42.635373 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-889z8\" (UniqueName: \"kubernetes.io/projected/64eec2c9-7144-45ac-b7aa-b0fe255f45b4-kube-api-access-889z8\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5\" (UID: \"64eec2c9-7144-45ac-b7aa-b0fe255f45b4\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5"
Nov 21 13:50:42 crc kubenswrapper[4721]: I1121 13:50:42.636134 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/64eec2c9-7144-45ac-b7aa-b0fe255f45b4-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5\" (UID: \"64eec2c9-7144-45ac-b7aa-b0fe255f45b4\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5"
Nov 21 13:50:42 crc kubenswrapper[4721]: I1121 13:50:42.636281 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/64eec2c9-7144-45ac-b7aa-b0fe255f45b4-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5\" (UID: \"64eec2c9-7144-45ac-b7aa-b0fe255f45b4\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5"
Nov 21 13:50:43 crc kubenswrapper[4721]: I1121 13:50:42.659135 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-889z8\" (UniqueName: \"kubernetes.io/projected/64eec2c9-7144-45ac-b7aa-b0fe255f45b4-kube-api-access-889z8\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5\" (UID: \"64eec2c9-7144-45ac-b7aa-b0fe255f45b4\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5"
Nov 21 13:50:43 crc kubenswrapper[4721]: I1121 13:50:42.751616 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5"
Nov 21 13:50:43 crc kubenswrapper[4721]: I1121 13:50:43.733924 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5"]
Nov 21 13:50:43 crc kubenswrapper[4721]: W1121 13:50:43.741245 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod64eec2c9_7144_45ac_b7aa_b0fe255f45b4.slice/crio-f62c4702249ea0106441345cd9ba6b2f529f953183e81c21d6101c260f1a9d4e WatchSource:0}: Error finding container f62c4702249ea0106441345cd9ba6b2f529f953183e81c21d6101c260f1a9d4e: Status 404 returned error can't find the container with id f62c4702249ea0106441345cd9ba6b2f529f953183e81c21d6101c260f1a9d4e
Nov 21 13:50:44 crc kubenswrapper[4721]: I1121 13:50:44.199377 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5" event={"ID":"64eec2c9-7144-45ac-b7aa-b0fe255f45b4","Type":"ContainerStarted","Data":"f62c4702249ea0106441345cd9ba6b2f529f953183e81c21d6101c260f1a9d4e"}
Nov 21 13:50:45 crc kubenswrapper[4721]: I1121 13:50:45.208022 4721 generic.go:334] "Generic (PLEG): container finished" podID="64eec2c9-7144-45ac-b7aa-b0fe255f45b4" containerID="f40e0781746c81eb77cac52717f6b7e3f71610519aa8c5c5e2555ca25ad0f4a2" exitCode=0
Nov 21 13:50:45 crc kubenswrapper[4721]: I1121 13:50:45.208417 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5" event={"ID":"64eec2c9-7144-45ac-b7aa-b0fe255f45b4","Type":"ContainerDied","Data":"f40e0781746c81eb77cac52717f6b7e3f71610519aa8c5c5e2555ca25ad0f4a2"}
Nov 21 13:50:46 crc kubenswrapper[4721]: I1121 13:50:46.050336 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pg5jp"
Nov 21 13:50:53 crc kubenswrapper[4721]: I1121 13:50:53.273460 4721 generic.go:334] "Generic (PLEG): container finished" podID="64eec2c9-7144-45ac-b7aa-b0fe255f45b4" containerID="458316f0909fc24ba400d8b2dd0f8192532fb53b34a69a3728bc1093825fe6b4" exitCode=0
Nov 21 13:50:53 crc kubenswrapper[4721]: I1121 13:50:53.273642 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5" event={"ID":"64eec2c9-7144-45ac-b7aa-b0fe255f45b4","Type":"ContainerDied","Data":"458316f0909fc24ba400d8b2dd0f8192532fb53b34a69a3728bc1093825fe6b4"}
Nov 21 13:50:54 crc kubenswrapper[4721]: I1121 13:50:54.286662 4721 generic.go:334] "Generic (PLEG): container finished" podID="64eec2c9-7144-45ac-b7aa-b0fe255f45b4" containerID="d5cc1c1a279eaaa42f25641aeb66335bb732cf509827110babfad95f3f636384" exitCode=0
Nov 21 13:50:54 crc kubenswrapper[4721]: I1121 13:50:54.287109 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5" event={"ID":"64eec2c9-7144-45ac-b7aa-b0fe255f45b4","Type":"ContainerDied","Data":"d5cc1c1a279eaaa42f25641aeb66335bb732cf509827110babfad95f3f636384"}
Nov 21 13:50:55 crc kubenswrapper[4721]: I1121 13:50:55.596263 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5"
Nov 21 13:50:55 crc kubenswrapper[4721]: I1121 13:50:55.734322 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-889z8\" (UniqueName: \"kubernetes.io/projected/64eec2c9-7144-45ac-b7aa-b0fe255f45b4-kube-api-access-889z8\") pod \"64eec2c9-7144-45ac-b7aa-b0fe255f45b4\" (UID: \"64eec2c9-7144-45ac-b7aa-b0fe255f45b4\") "
Nov 21 13:50:55 crc kubenswrapper[4721]: I1121 13:50:55.734430 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/64eec2c9-7144-45ac-b7aa-b0fe255f45b4-util\") pod \"64eec2c9-7144-45ac-b7aa-b0fe255f45b4\" (UID: \"64eec2c9-7144-45ac-b7aa-b0fe255f45b4\") "
Nov 21 13:50:55 crc kubenswrapper[4721]: I1121 13:50:55.734467 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/64eec2c9-7144-45ac-b7aa-b0fe255f45b4-bundle\") pod \"64eec2c9-7144-45ac-b7aa-b0fe255f45b4\" (UID: \"64eec2c9-7144-45ac-b7aa-b0fe255f45b4\") "
Nov 21 13:50:55 crc kubenswrapper[4721]: I1121 13:50:55.735812 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64eec2c9-7144-45ac-b7aa-b0fe255f45b4-bundle" (OuterVolumeSpecName: "bundle") pod "64eec2c9-7144-45ac-b7aa-b0fe255f45b4" (UID: "64eec2c9-7144-45ac-b7aa-b0fe255f45b4"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 13:50:55 crc kubenswrapper[4721]: I1121 13:50:55.743030 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64eec2c9-7144-45ac-b7aa-b0fe255f45b4-kube-api-access-889z8" (OuterVolumeSpecName: "kube-api-access-889z8") pod "64eec2c9-7144-45ac-b7aa-b0fe255f45b4" (UID: "64eec2c9-7144-45ac-b7aa-b0fe255f45b4"). InnerVolumeSpecName "kube-api-access-889z8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 13:50:55 crc kubenswrapper[4721]: I1121 13:50:55.759278 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64eec2c9-7144-45ac-b7aa-b0fe255f45b4-util" (OuterVolumeSpecName: "util") pod "64eec2c9-7144-45ac-b7aa-b0fe255f45b4" (UID: "64eec2c9-7144-45ac-b7aa-b0fe255f45b4"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 13:50:55 crc kubenswrapper[4721]: I1121 13:50:55.836536 4721 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/64eec2c9-7144-45ac-b7aa-b0fe255f45b4-util\") on node \"crc\" DevicePath \"\""
Nov 21 13:50:55 crc kubenswrapper[4721]: I1121 13:50:55.837009 4721 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/64eec2c9-7144-45ac-b7aa-b0fe255f45b4-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 13:50:55 crc kubenswrapper[4721]: I1121 13:50:55.837205 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-889z8\" (UniqueName: \"kubernetes.io/projected/64eec2c9-7144-45ac-b7aa-b0fe255f45b4-kube-api-access-889z8\") on node \"crc\" DevicePath \"\""
Nov 21 13:50:56 crc kubenswrapper[4721]: I1121 13:50:56.301435 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5" event={"ID":"64eec2c9-7144-45ac-b7aa-b0fe255f45b4","Type":"ContainerDied","Data":"f62c4702249ea0106441345cd9ba6b2f529f953183e81c21d6101c260f1a9d4e"}
Nov 21 13:50:56 crc kubenswrapper[4721]: I1121 13:50:56.301496 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f62c4702249ea0106441345cd9ba6b2f529f953183e81c21d6101c260f1a9d4e"
Nov 21 13:50:56 crc kubenswrapper[4721]: I1121 13:50:56.301578 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.139133 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-58688b66bd-4mdqb"]
Nov 21 13:51:09 crc kubenswrapper[4721]: E1121 13:51:09.139860 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64eec2c9-7144-45ac-b7aa-b0fe255f45b4" containerName="util"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.139877 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="64eec2c9-7144-45ac-b7aa-b0fe255f45b4" containerName="util"
Nov 21 13:51:09 crc kubenswrapper[4721]: E1121 13:51:09.139897 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64eec2c9-7144-45ac-b7aa-b0fe255f45b4" containerName="extract"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.139905 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="64eec2c9-7144-45ac-b7aa-b0fe255f45b4" containerName="extract"
Nov 21 13:51:09 crc kubenswrapper[4721]: E1121 13:51:09.139912 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64eec2c9-7144-45ac-b7aa-b0fe255f45b4" containerName="pull"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.139918 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="64eec2c9-7144-45ac-b7aa-b0fe255f45b4" containerName="pull"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.140002 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="64eec2c9-7144-45ac-b7aa-b0fe255f45b4" containerName="extract"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.140383 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-58688b66bd-4mdqb"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.142619 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.142944 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.143094 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.143172 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-xv8k7"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.146527 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.158154 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-58688b66bd-4mdqb"]
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.220634 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/caad2501-e4ef-4058-9311-d2192becf981-webhook-cert\") pod \"metallb-operator-controller-manager-58688b66bd-4mdqb\" (UID: \"caad2501-e4ef-4058-9311-d2192becf981\") " pod="metallb-system/metallb-operator-controller-manager-58688b66bd-4mdqb"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.220703 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/caad2501-e4ef-4058-9311-d2192becf981-apiservice-cert\") pod \"metallb-operator-controller-manager-58688b66bd-4mdqb\" (UID: \"caad2501-e4ef-4058-9311-d2192becf981\") " pod="metallb-system/metallb-operator-controller-manager-58688b66bd-4mdqb"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.220733 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tsvm\" (UniqueName: \"kubernetes.io/projected/caad2501-e4ef-4058-9311-d2192becf981-kube-api-access-8tsvm\") pod \"metallb-operator-controller-manager-58688b66bd-4mdqb\" (UID: \"caad2501-e4ef-4058-9311-d2192becf981\") " pod="metallb-system/metallb-operator-controller-manager-58688b66bd-4mdqb"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.322376 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/caad2501-e4ef-4058-9311-d2192becf981-webhook-cert\") pod \"metallb-operator-controller-manager-58688b66bd-4mdqb\" (UID: \"caad2501-e4ef-4058-9311-d2192becf981\") " pod="metallb-system/metallb-operator-controller-manager-58688b66bd-4mdqb"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.323353 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/caad2501-e4ef-4058-9311-d2192becf981-apiservice-cert\") pod \"metallb-operator-controller-manager-58688b66bd-4mdqb\" (UID: \"caad2501-e4ef-4058-9311-d2192becf981\") " pod="metallb-system/metallb-operator-controller-manager-58688b66bd-4mdqb"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.323392 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tsvm\" (UniqueName: \"kubernetes.io/projected/caad2501-e4ef-4058-9311-d2192becf981-kube-api-access-8tsvm\") pod \"metallb-operator-controller-manager-58688b66bd-4mdqb\" (UID: \"caad2501-e4ef-4058-9311-d2192becf981\") " pod="metallb-system/metallb-operator-controller-manager-58688b66bd-4mdqb"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.329763 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/caad2501-e4ef-4058-9311-d2192becf981-webhook-cert\") pod \"metallb-operator-controller-manager-58688b66bd-4mdqb\" (UID: \"caad2501-e4ef-4058-9311-d2192becf981\") " pod="metallb-system/metallb-operator-controller-manager-58688b66bd-4mdqb"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.331756 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/caad2501-e4ef-4058-9311-d2192becf981-apiservice-cert\") pod \"metallb-operator-controller-manager-58688b66bd-4mdqb\" (UID: \"caad2501-e4ef-4058-9311-d2192becf981\") " pod="metallb-system/metallb-operator-controller-manager-58688b66bd-4mdqb"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.341284 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tsvm\" (UniqueName: \"kubernetes.io/projected/caad2501-e4ef-4058-9311-d2192becf981-kube-api-access-8tsvm\") pod \"metallb-operator-controller-manager-58688b66bd-4mdqb\" (UID: \"caad2501-e4ef-4058-9311-d2192becf981\") " pod="metallb-system/metallb-operator-controller-manager-58688b66bd-4mdqb"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.378688 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6dc6b575b-bq4b5"]
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.379501 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6dc6b575b-bq4b5"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.381463 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.381777 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.381874 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-m78bc"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.394479 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6dc6b575b-bq4b5"]
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.424532 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/95f7f7f4-5a42-4fae-a1ea-249e844aec92-webhook-cert\") pod \"metallb-operator-webhook-server-6dc6b575b-bq4b5\" (UID: \"95f7f7f4-5a42-4fae-a1ea-249e844aec92\") " pod="metallb-system/metallb-operator-webhook-server-6dc6b575b-bq4b5"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.424600 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvbdd\" (UniqueName: \"kubernetes.io/projected/95f7f7f4-5a42-4fae-a1ea-249e844aec92-kube-api-access-xvbdd\") pod \"metallb-operator-webhook-server-6dc6b575b-bq4b5\" (UID: \"95f7f7f4-5a42-4fae-a1ea-249e844aec92\") " pod="metallb-system/metallb-operator-webhook-server-6dc6b575b-bq4b5"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.424621 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/95f7f7f4-5a42-4fae-a1ea-249e844aec92-apiservice-cert\") pod \"metallb-operator-webhook-server-6dc6b575b-bq4b5\" (UID: \"95f7f7f4-5a42-4fae-a1ea-249e844aec92\") " pod="metallb-system/metallb-operator-webhook-server-6dc6b575b-bq4b5"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.455263 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-58688b66bd-4mdqb"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.526294 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvbdd\" (UniqueName: \"kubernetes.io/projected/95f7f7f4-5a42-4fae-a1ea-249e844aec92-kube-api-access-xvbdd\") pod \"metallb-operator-webhook-server-6dc6b575b-bq4b5\" (UID: \"95f7f7f4-5a42-4fae-a1ea-249e844aec92\") " pod="metallb-system/metallb-operator-webhook-server-6dc6b575b-bq4b5"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.526355 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/95f7f7f4-5a42-4fae-a1ea-249e844aec92-apiservice-cert\") pod \"metallb-operator-webhook-server-6dc6b575b-bq4b5\" (UID: \"95f7f7f4-5a42-4fae-a1ea-249e844aec92\") " pod="metallb-system/metallb-operator-webhook-server-6dc6b575b-bq4b5"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.526437 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/95f7f7f4-5a42-4fae-a1ea-249e844aec92-webhook-cert\") pod \"metallb-operator-webhook-server-6dc6b575b-bq4b5\" (UID: \"95f7f7f4-5a42-4fae-a1ea-249e844aec92\") " pod="metallb-system/metallb-operator-webhook-server-6dc6b575b-bq4b5"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.530552 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/95f7f7f4-5a42-4fae-a1ea-249e844aec92-apiservice-cert\") pod \"metallb-operator-webhook-server-6dc6b575b-bq4b5\" (UID: \"95f7f7f4-5a42-4fae-a1ea-249e844aec92\") " pod="metallb-system/metallb-operator-webhook-server-6dc6b575b-bq4b5"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.532730 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/95f7f7f4-5a42-4fae-a1ea-249e844aec92-webhook-cert\") pod \"metallb-operator-webhook-server-6dc6b575b-bq4b5\" (UID: \"95f7f7f4-5a42-4fae-a1ea-249e844aec92\") " pod="metallb-system/metallb-operator-webhook-server-6dc6b575b-bq4b5"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.552789 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvbdd\" (UniqueName: \"kubernetes.io/projected/95f7f7f4-5a42-4fae-a1ea-249e844aec92-kube-api-access-xvbdd\") pod \"metallb-operator-webhook-server-6dc6b575b-bq4b5\" (UID: \"95f7f7f4-5a42-4fae-a1ea-249e844aec92\") " pod="metallb-system/metallb-operator-webhook-server-6dc6b575b-bq4b5"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.695824 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6dc6b575b-bq4b5"
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.758880 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-58688b66bd-4mdqb"]
Nov 21 13:51:09 crc kubenswrapper[4721]: W1121 13:51:09.775903 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcaad2501_e4ef_4058_9311_d2192becf981.slice/crio-09ff397f45e10a66a8ff9073023e42d5df4f9b4fa6cf10bcc22652e642ae0130 WatchSource:0}: Error finding container 09ff397f45e10a66a8ff9073023e42d5df4f9b4fa6cf10bcc22652e642ae0130: Status 404 returned error can't find the container with id 09ff397f45e10a66a8ff9073023e42d5df4f9b4fa6cf10bcc22652e642ae0130
Nov 21 13:51:09 crc kubenswrapper[4721]: I1121 13:51:09.936902 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6dc6b575b-bq4b5"]
Nov 21 13:51:09 crc kubenswrapper[4721]: W1121 13:51:09.945739 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95f7f7f4_5a42_4fae_a1ea_249e844aec92.slice/crio-2bdd4e1c189d09521d0f1430ce2ca93c29a4e84b906e2602266a6e3c9713761a WatchSource:0}: Error finding container 2bdd4e1c189d09521d0f1430ce2ca93c29a4e84b906e2602266a6e3c9713761a: Status 404 returned error can't find the container with id 2bdd4e1c189d09521d0f1430ce2ca93c29a4e84b906e2602266a6e3c9713761a
Nov 21 13:51:10 crc kubenswrapper[4721]: I1121 13:51:10.378395 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6dc6b575b-bq4b5" event={"ID":"95f7f7f4-5a42-4fae-a1ea-249e844aec92","Type":"ContainerStarted","Data":"2bdd4e1c189d09521d0f1430ce2ca93c29a4e84b906e2602266a6e3c9713761a"}
Nov 21 13:51:10 crc kubenswrapper[4721]: I1121 13:51:10.379722 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-58688b66bd-4mdqb" event={"ID":"caad2501-e4ef-4058-9311-d2192becf981","Type":"ContainerStarted","Data":"09ff397f45e10a66a8ff9073023e42d5df4f9b4fa6cf10bcc22652e642ae0130"}
Nov 21 13:51:18 crc kubenswrapper[4721]: I1121 13:51:18.419602 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6dc6b575b-bq4b5" event={"ID":"95f7f7f4-5a42-4fae-a1ea-249e844aec92","Type":"ContainerStarted","Data":"ba17a5de086572f0b2dec3224eb350c5394a1a83e8019ba56d02807d2b16bc40"}
Nov 21 13:51:18 crc kubenswrapper[4721]: I1121 13:51:18.420256 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6dc6b575b-bq4b5"
Nov 21 13:51:18 crc kubenswrapper[4721]: I1121 13:51:18.422318 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-58688b66bd-4mdqb" event={"ID":"caad2501-e4ef-4058-9311-d2192becf981","Type":"ContainerStarted","Data":"8943f6bdafc134583d4fabb8dacff71ca59426d6848adf1820468113f06db568"}
Nov 21 13:51:18 crc kubenswrapper[4721]: I1121 13:51:18.422619 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-58688b66bd-4mdqb"
Nov 21 13:51:18 crc kubenswrapper[4721]: I1121 13:51:18.443644 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6dc6b575b-bq4b5" podStartSLOduration=1.70944217 podStartE2EDuration="9.443623777s" podCreationTimestamp="2025-11-21 13:51:09 +0000 UTC" firstStartedPulling="2025-11-21 13:51:09.947977706 +0000 UTC m=+907.925868964" lastFinishedPulling="2025-11-21 13:51:17.682159313 +0000 UTC m=+915.660050571" observedRunningTime="2025-11-21 13:51:18.441965464 +0000 UTC m=+916.419856722" watchObservedRunningTime="2025-11-21 13:51:18.443623777 +0000 UTC m=+916.421515045"
Nov 21 13:51:18 crc kubenswrapper[4721]: I1121 13:51:18.465477 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-58688b66bd-4mdqb" podStartSLOduration=1.588040687 podStartE2EDuration="9.46546015s" podCreationTimestamp="2025-11-21 13:51:09 +0000 UTC" firstStartedPulling="2025-11-21 13:51:09.778073971 +0000 UTC m=+907.755965229" lastFinishedPulling="2025-11-21 13:51:17.655493434 +0000 UTC m=+915.633384692" observedRunningTime="2025-11-21 13:51:18.464839394 +0000 UTC m=+916.442730652" watchObservedRunningTime="2025-11-21 13:51:18.46546015 +0000 UTC m=+916.443351408"
Nov 21 13:51:29 crc kubenswrapper[4721]: I1121 13:51:29.702295 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6dc6b575b-bq4b5"
Nov 21 13:51:49 crc kubenswrapper[4721]: I1121 13:51:49.458634 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-58688b66bd-4mdqb"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.190271 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-sfbbq"]
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.192634 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.195784 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.195801 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.198146 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-9dqb7"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.215957 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-8ft7x"]
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.216861 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-6998585d5-8ft7x"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.220186 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.238313 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-8ft7x"]
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.275577 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-d97pd"]
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.276686 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-d97pd"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.278201 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-xnc2v"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.278610 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.278986 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.279553 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.302707 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6c7b4b5f48-72qg5"]
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.303680 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6c7b4b5f48-72qg5"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.310665 4721 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.316624 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6c7b4b5f48-72qg5"]
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.346635 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-frr-conf\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.346684 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qk67l\" (UniqueName: \"kubernetes.io/projected/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-kube-api-access-qk67l\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.346710 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-metrics\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.346905 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-metrics-certs\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.346925 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-frr-sockets\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.346944 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-reloader\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.347108 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/dcc25858-9526-4309-a78c-4031fc67b915-cert\") pod \"frr-k8s-webhook-server-6998585d5-8ft7x\" (UID: \"dcc25858-9526-4309-a78c-4031fc67b915\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-8ft7x"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.347243 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cw2pt\" (UniqueName: \"kubernetes.io/projected/dcc25858-9526-4309-a78c-4031fc67b915-kube-api-access-cw2pt\") pod \"frr-k8s-webhook-server-6998585d5-8ft7x\" (UID: \"dcc25858-9526-4309-a78c-4031fc67b915\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-8ft7x"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.347403 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-frr-startup\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.448681 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-frr-startup\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.448741 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b56a61e0-296e-4fae-adf6-9c2c5b86444f-cert\") pod \"controller-6c7b4b5f48-72qg5\" (UID: \"b56a61e0-296e-4fae-adf6-9c2c5b86444f\") " pod="metallb-system/controller-6c7b4b5f48-72qg5"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.448770 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-frr-conf\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.448796 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qk67l\" (UniqueName: \"kubernetes.io/projected/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-kube-api-access-qk67l\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.448825 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkb5p\" (UniqueName: \"kubernetes.io/projected/b56a61e0-296e-4fae-adf6-9c2c5b86444f-kube-api-access-wkb5p\") pod \"controller-6c7b4b5f48-72qg5\" (UID: \"b56a61e0-296e-4fae-adf6-9c2c5b86444f\") " pod="metallb-system/controller-6c7b4b5f48-72qg5"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.448855 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-metrics\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.448880 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggrmj\" (UniqueName: \"kubernetes.io/projected/e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7-kube-api-access-ggrmj\") pod \"speaker-d97pd\" (UID: \"e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7\") " pod="metallb-system/speaker-d97pd"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.448999 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b56a61e0-296e-4fae-adf6-9c2c5b86444f-metrics-certs\") pod \"controller-6c7b4b5f48-72qg5\" (UID: \"b56a61e0-296e-4fae-adf6-9c2c5b86444f\") " pod="metallb-system/controller-6c7b4b5f48-72qg5"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.449047 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7-metrics-certs\") pod \"speaker-d97pd\" (UID: \"e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7\") " pod="metallb-system/speaker-d97pd"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.449126 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-metrics-certs\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.449161 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-frr-sockets\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.449194 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-reloader\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.449234 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/dcc25858-9526-4309-a78c-4031fc67b915-cert\") pod \"frr-k8s-webhook-server-6998585d5-8ft7x\" (UID: \"dcc25858-9526-4309-a78c-4031fc67b915\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-8ft7x"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.449274 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7-metallb-excludel2\") pod \"speaker-d97pd\" (UID: \"e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7\") " pod="metallb-system/speaker-d97pd"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.449312 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cw2pt\" (UniqueName: \"kubernetes.io/projected/dcc25858-9526-4309-a78c-4031fc67b915-kube-api-access-cw2pt\") pod \"frr-k8s-webhook-server-6998585d5-8ft7x\" (UID: \"dcc25858-9526-4309-a78c-4031fc67b915\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-8ft7x"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.449363 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-metrics\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.449389 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7-memberlist\") pod \"speaker-d97pd\" (UID: \"e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7\") " pod="metallb-system/speaker-d97pd"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.449535 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-frr-sockets\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.449583 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-reloader\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.449835 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-frr-startup\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.450324 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-frr-conf\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.456692 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/dcc25858-9526-4309-a78c-4031fc67b915-cert\") pod \"frr-k8s-webhook-server-6998585d5-8ft7x\" (UID: \"dcc25858-9526-4309-a78c-4031fc67b915\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-8ft7x"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.457591 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-metrics-certs\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.469458 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cw2pt\" (UniqueName: \"kubernetes.io/projected/dcc25858-9526-4309-a78c-4031fc67b915-kube-api-access-cw2pt\") pod \"frr-k8s-webhook-server-6998585d5-8ft7x\" (UID: \"dcc25858-9526-4309-a78c-4031fc67b915\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-8ft7x"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.471093 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qk67l\" (UniqueName: \"kubernetes.io/projected/d7e86b33-b3dd-4969-8f78-8749e9c9ef6c-kube-api-access-qk67l\") pod \"frr-k8s-sfbbq\" (UID: \"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c\") " pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.508585 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-sfbbq"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.529130 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-6998585d5-8ft7x"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.550665 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7-metallb-excludel2\") pod \"speaker-d97pd\" (UID: \"e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7\") " pod="metallb-system/speaker-d97pd"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.550737 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7-memberlist\") pod \"speaker-d97pd\" (UID: \"e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7\") " pod="metallb-system/speaker-d97pd"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.550790 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b56a61e0-296e-4fae-adf6-9c2c5b86444f-cert\") pod \"controller-6c7b4b5f48-72qg5\" (UID: \"b56a61e0-296e-4fae-adf6-9c2c5b86444f\") " pod="metallb-system/controller-6c7b4b5f48-72qg5"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.550824 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkb5p\" (UniqueName: \"kubernetes.io/projected/b56a61e0-296e-4fae-adf6-9c2c5b86444f-kube-api-access-wkb5p\") pod \"controller-6c7b4b5f48-72qg5\" (UID: \"b56a61e0-296e-4fae-adf6-9c2c5b86444f\") " pod="metallb-system/controller-6c7b4b5f48-72qg5"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.550852 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggrmj\" (UniqueName: \"kubernetes.io/projected/e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7-kube-api-access-ggrmj\") pod \"speaker-d97pd\" (UID: \"e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7\") " pod="metallb-system/speaker-d97pd"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.550885 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b56a61e0-296e-4fae-adf6-9c2c5b86444f-metrics-certs\") pod \"controller-6c7b4b5f48-72qg5\" (UID: \"b56a61e0-296e-4fae-adf6-9c2c5b86444f\") " pod="metallb-system/controller-6c7b4b5f48-72qg5"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.550904 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7-metrics-certs\") pod \"speaker-d97pd\" (UID: \"e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7\") " pod="metallb-system/speaker-d97pd"
Nov 21 13:51:50 crc kubenswrapper[4721]: E1121 13:51:50.550946 4721 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Nov 21 13:51:50 crc kubenswrapper[4721]: E1121 13:51:50.551033 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7-memberlist podName:e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7 nodeName:}" failed. No retries permitted until 2025-11-21 13:51:51.051011758 +0000 UTC m=+949.028903016 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7-memberlist") pod "speaker-d97pd" (UID: "e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7") : secret "metallb-memberlist" not found
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.551537 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7-metallb-excludel2\") pod \"speaker-d97pd\" (UID: \"e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7\") " pod="metallb-system/speaker-d97pd"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.554225 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7-metrics-certs\") pod \"speaker-d97pd\" (UID: \"e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7\") " pod="metallb-system/speaker-d97pd"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.555259 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b56a61e0-296e-4fae-adf6-9c2c5b86444f-metrics-certs\") pod \"controller-6c7b4b5f48-72qg5\" (UID: \"b56a61e0-296e-4fae-adf6-9c2c5b86444f\") " pod="metallb-system/controller-6c7b4b5f48-72qg5"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.559082 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b56a61e0-296e-4fae-adf6-9c2c5b86444f-cert\") pod \"controller-6c7b4b5f48-72qg5\" (UID: \"b56a61e0-296e-4fae-adf6-9c2c5b86444f\") " pod="metallb-system/controller-6c7b4b5f48-72qg5"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.569997 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggrmj\" (UniqueName: \"kubernetes.io/projected/e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7-kube-api-access-ggrmj\") pod \"speaker-d97pd\" (UID: \"e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7\") " pod="metallb-system/speaker-d97pd"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.572692 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkb5p\" (UniqueName: \"kubernetes.io/projected/b56a61e0-296e-4fae-adf6-9c2c5b86444f-kube-api-access-wkb5p\") pod \"controller-6c7b4b5f48-72qg5\" (UID: \"b56a61e0-296e-4fae-adf6-9c2c5b86444f\") " pod="metallb-system/controller-6c7b4b5f48-72qg5"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.617733 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6c7b4b5f48-72qg5"
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.805902 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-8ft7x"]
Nov 21 13:51:50 crc kubenswrapper[4721]: I1121 13:51:50.905733 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6c7b4b5f48-72qg5"]
Nov 21 13:51:50 crc kubenswrapper[4721]: W1121 13:51:50.913625 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb56a61e0_296e_4fae_adf6_9c2c5b86444f.slice/crio-4cc891c4933b10b50580b910c6c7aad4203163022f86d3702d7d3a4ca5dfaf4e WatchSource:0}: Error finding container 4cc891c4933b10b50580b910c6c7aad4203163022f86d3702d7d3a4ca5dfaf4e: Status 404 returned error can't find the container with id 4cc891c4933b10b50580b910c6c7aad4203163022f86d3702d7d3a4ca5dfaf4e
Nov 21 13:51:51 crc kubenswrapper[4721]: I1121 13:51:51.066913 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7-memberlist\") pod \"speaker-d97pd\" (UID: \"e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7\") " pod="metallb-system/speaker-d97pd"
Nov 21 13:51:51 crc kubenswrapper[4721]: E1121 13:51:51.067181 4721 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Nov 21 13:51:51 crc kubenswrapper[4721]: E1121 13:51:51.067513 4721 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7-memberlist podName:e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7 nodeName:}" failed. No retries permitted until 2025-11-21 13:51:52.067494778 +0000 UTC m=+950.045386036 (durationBeforeRetry 1s).
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7-memberlist") pod "speaker-d97pd" (UID: "e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7") : secret "metallb-memberlist" not found Nov 21 13:51:51 crc kubenswrapper[4721]: I1121 13:51:51.607691 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-72qg5" event={"ID":"b56a61e0-296e-4fae-adf6-9c2c5b86444f","Type":"ContainerStarted","Data":"058b06820cc322e3c76cb38c9d0db6f5e9171c9e8bc56a1578c8929cdc0b6ccd"} Nov 21 13:51:51 crc kubenswrapper[4721]: I1121 13:51:51.607749 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-72qg5" event={"ID":"b56a61e0-296e-4fae-adf6-9c2c5b86444f","Type":"ContainerStarted","Data":"4cc891c4933b10b50580b910c6c7aad4203163022f86d3702d7d3a4ca5dfaf4e"} Nov 21 13:51:51 crc kubenswrapper[4721]: I1121 13:51:51.608799 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-6998585d5-8ft7x" event={"ID":"dcc25858-9526-4309-a78c-4031fc67b915","Type":"ContainerStarted","Data":"a8e1fe7ba270d6ca505436ad09dd3dcdfa271c6bf1412753921d77bb153894ca"} Nov 21 13:51:51 crc kubenswrapper[4721]: I1121 13:51:51.610108 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sfbbq" event={"ID":"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c","Type":"ContainerStarted","Data":"821aaef273d5703c9842d4e7d1ab32e3ea2461a54ce3a7aa5c0424f43b2e25e9"} Nov 21 13:51:52 crc kubenswrapper[4721]: I1121 13:51:52.081174 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7-memberlist\") pod \"speaker-d97pd\" (UID: \"e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7\") " pod="metallb-system/speaker-d97pd" Nov 21 13:51:52 crc kubenswrapper[4721]: I1121 13:51:52.094381 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7-memberlist\") pod \"speaker-d97pd\" (UID: \"e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7\") " pod="metallb-system/speaker-d97pd" Nov 21 13:51:52 crc kubenswrapper[4721]: I1121 13:51:52.390624 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-d97pd" Nov 21 13:51:52 crc kubenswrapper[4721]: W1121 13:51:52.439670 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode6f9b6f9_1ef4_41f2_85cc_583980cdf8a7.slice/crio-afe1d7d399355ed428397e2dbd8046007640a0cea593d39831f42a5facd27f5d WatchSource:0}: Error finding container afe1d7d399355ed428397e2dbd8046007640a0cea593d39831f42a5facd27f5d: Status 404 returned error can't find the container with id afe1d7d399355ed428397e2dbd8046007640a0cea593d39831f42a5facd27f5d Nov 21 13:51:52 crc kubenswrapper[4721]: I1121 13:51:52.622104 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-d97pd" event={"ID":"e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7","Type":"ContainerStarted","Data":"afe1d7d399355ed428397e2dbd8046007640a0cea593d39831f42a5facd27f5d"} Nov 21 13:51:53 crc kubenswrapper[4721]: I1121 13:51:53.630783 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-d97pd" event={"ID":"e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7","Type":"ContainerStarted","Data":"52c8b1f6ef1a2141408fb4dc70e15f4ece90588dd9cd4dd82dabbc5e6ad6e91c"} Nov 21 13:51:58 crc kubenswrapper[4721]: I1121 13:51:58.667915 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-d97pd" event={"ID":"e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7","Type":"ContainerStarted","Data":"f89abf2e598549dc25e1a32f07c60d91d67dd440afa9c0d29ef7f748d5fbf83a"} Nov 21 13:51:58 crc kubenswrapper[4721]: I1121 13:51:58.668717 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-d97pd" Nov 21 13:51:58 crc kubenswrapper[4721]: I1121 13:51:58.671207 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-72qg5" event={"ID":"b56a61e0-296e-4fae-adf6-9c2c5b86444f","Type":"ContainerStarted","Data":"36080bd95632d627ebd864714f35dc7ec4e51f58457aeecb09f9f899e94ba175"} Nov 21 13:51:58 crc kubenswrapper[4721]: I1121 13:51:58.671374 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6c7b4b5f48-72qg5" Nov 21 13:51:58 crc kubenswrapper[4721]: I1121 13:51:58.676030 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-6998585d5-8ft7x" event={"ID":"dcc25858-9526-4309-a78c-4031fc67b915","Type":"ContainerStarted","Data":"6f90fe5d82a22253d3c2196e1704a381afc1039f96787e35dff056ec36ed9053"} Nov 21 13:51:58 crc kubenswrapper[4721]: I1121 13:51:58.676879 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-6998585d5-8ft7x" Nov 21 13:51:58 crc kubenswrapper[4721]: I1121 13:51:58.679438 4721 generic.go:334] "Generic (PLEG): container finished" podID="d7e86b33-b3dd-4969-8f78-8749e9c9ef6c" containerID="3af15b8e7ebc588875a56363fe00556136863e3dca1dd56689478f492b7e41e6" exitCode=0 Nov 21 13:51:58 crc kubenswrapper[4721]: I1121 13:51:58.679490 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sfbbq" event={"ID":"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c","Type":"ContainerDied","Data":"3af15b8e7ebc588875a56363fe00556136863e3dca1dd56689478f492b7e41e6"} Nov 21 13:51:58 crc kubenswrapper[4721]: I1121 13:51:58.688581 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-d97pd" podStartSLOduration=3.587410768 podStartE2EDuration="8.688554934s" podCreationTimestamp="2025-11-21 13:51:50 +0000 UTC" 
firstStartedPulling="2025-11-21 13:51:52.837566045 +0000 UTC m=+950.815457303" lastFinishedPulling="2025-11-21 13:51:57.938710211 +0000 UTC m=+955.916601469" observedRunningTime="2025-11-21 13:51:58.685724471 +0000 UTC m=+956.663615739" watchObservedRunningTime="2025-11-21 13:51:58.688554934 +0000 UTC m=+956.666446232" Nov 21 13:51:58 crc kubenswrapper[4721]: I1121 13:51:58.728576 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-6998585d5-8ft7x" podStartSLOduration=1.58174627 podStartE2EDuration="8.728554166s" podCreationTimestamp="2025-11-21 13:51:50 +0000 UTC" firstStartedPulling="2025-11-21 13:51:50.839169305 +0000 UTC m=+948.817060563" lastFinishedPulling="2025-11-21 13:51:57.985977201 +0000 UTC m=+955.963868459" observedRunningTime="2025-11-21 13:51:58.725442506 +0000 UTC m=+956.703333764" watchObservedRunningTime="2025-11-21 13:51:58.728554166 +0000 UTC m=+956.706445424" Nov 21 13:51:58 crc kubenswrapper[4721]: I1121 13:51:58.746473 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6c7b4b5f48-72qg5" podStartSLOduration=1.948950268 podStartE2EDuration="8.746453678s" podCreationTimestamp="2025-11-21 13:51:50 +0000 UTC" firstStartedPulling="2025-11-21 13:51:51.116430761 +0000 UTC m=+949.094322009" lastFinishedPulling="2025-11-21 13:51:57.913934161 +0000 UTC m=+955.891825419" observedRunningTime="2025-11-21 13:51:58.741716216 +0000 UTC m=+956.719607494" watchObservedRunningTime="2025-11-21 13:51:58.746453678 +0000 UTC m=+956.724344937" Nov 21 13:51:59 crc kubenswrapper[4721]: I1121 13:51:59.689021 4721 generic.go:334] "Generic (PLEG): container finished" podID="d7e86b33-b3dd-4969-8f78-8749e9c9ef6c" containerID="cf0e26ea40a2be8af3532f455e061e9ba112eeb65ab1cdab277764fe5382ad2a" exitCode=0 Nov 21 13:51:59 crc kubenswrapper[4721]: I1121 13:51:59.689101 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sfbbq" event={"ID":"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c","Type":"ContainerDied","Data":"cf0e26ea40a2be8af3532f455e061e9ba112eeb65ab1cdab277764fe5382ad2a"} Nov 21 13:52:00 crc kubenswrapper[4721]: I1121 13:52:00.697453 4721 generic.go:334] "Generic (PLEG): container finished" podID="d7e86b33-b3dd-4969-8f78-8749e9c9ef6c" containerID="5efd2952b5b6381d9f1734ba9d3e935522d1b0806a3e652ae08fc7dc2ba57f63" exitCode=0 Nov 21 13:52:00 crc kubenswrapper[4721]: I1121 13:52:00.697510 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sfbbq" event={"ID":"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c","Type":"ContainerDied","Data":"5efd2952b5b6381d9f1734ba9d3e935522d1b0806a3e652ae08fc7dc2ba57f63"} Nov 21 13:52:01 crc kubenswrapper[4721]: I1121 13:52:01.710025 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sfbbq" event={"ID":"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c","Type":"ContainerStarted","Data":"3b64eac46d6da2e14008ad546bd51da8bb96f92e0551ab571949474ec54f62ad"} Nov 21 13:52:01 crc kubenswrapper[4721]: I1121 13:52:01.710381 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sfbbq" event={"ID":"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c","Type":"ContainerStarted","Data":"57110f4d317e1380f1ab15f9724e22ddb1c4a9f636d8c53e69498421f6165946"} Nov 21 13:52:01 crc kubenswrapper[4721]: I1121 13:52:01.710400 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sfbbq" 
event={"ID":"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c","Type":"ContainerStarted","Data":"eccf803b733a3093dc27045cc56d835470f24631ddcf023122c217ff5ae8cadf"} Nov 21 13:52:01 crc kubenswrapper[4721]: I1121 13:52:01.710412 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sfbbq" event={"ID":"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c","Type":"ContainerStarted","Data":"fa5cb42da602e6050cfc632773550f20ac6448cdd3ae996f172213c0a10c95bc"} Nov 21 13:52:02 crc kubenswrapper[4721]: I1121 13:52:02.393718 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-d97pd" Nov 21 13:52:02 crc kubenswrapper[4721]: I1121 13:52:02.719861 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sfbbq" event={"ID":"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c","Type":"ContainerStarted","Data":"1c791ea04c3ab14987a7a44239193c44d6aa61fdd781d7f01994cea6dfa57097"} Nov 21 13:52:02 crc kubenswrapper[4721]: I1121 13:52:02.719904 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sfbbq" event={"ID":"d7e86b33-b3dd-4969-8f78-8749e9c9ef6c","Type":"ContainerStarted","Data":"82ebfd96a67fb21f23bc4d03ff2f5d16b1ac5f9e279c1c32385c3bc992d13057"} Nov 21 13:52:02 crc kubenswrapper[4721]: I1121 13:52:02.720030 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-sfbbq" Nov 21 13:52:02 crc kubenswrapper[4721]: I1121 13:52:02.744121 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-sfbbq" podStartSLOduration=5.503717811 podStartE2EDuration="12.744101552s" podCreationTimestamp="2025-11-21 13:51:50 +0000 UTC" firstStartedPulling="2025-11-21 13:51:50.699344836 +0000 UTC m=+948.677236094" lastFinishedPulling="2025-11-21 13:51:57.939728577 +0000 UTC m=+955.917619835" observedRunningTime="2025-11-21 13:52:02.740227902 +0000 UTC m=+960.718119160" watchObservedRunningTime="2025-11-21 13:52:02.744101552 +0000 UTC m=+960.721992810" Nov 21 13:52:05 crc kubenswrapper[4721]: I1121 13:52:05.262319 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 13:52:05 crc kubenswrapper[4721]: I1121 13:52:05.263284 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 13:52:05 crc kubenswrapper[4721]: I1121 13:52:05.509687 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-sfbbq" Nov 21 13:52:05 crc kubenswrapper[4721]: I1121 13:52:05.544965 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-sfbbq" Nov 21 13:52:08 crc kubenswrapper[4721]: I1121 13:52:08.730361 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-t799j"] Nov 21 13:52:08 crc kubenswrapper[4721]: I1121 13:52:08.731367 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-t799j" Nov 21 13:52:08 crc kubenswrapper[4721]: I1121 13:52:08.733257 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Nov 21 13:52:08 crc kubenswrapper[4721]: I1121 13:52:08.733308 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Nov 21 13:52:08 crc kubenswrapper[4721]: I1121 13:52:08.733601 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-index-dockercfg-qjxl4" Nov 21 13:52:08 crc kubenswrapper[4721]: I1121 13:52:08.746791 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-t799j"] Nov 21 13:52:08 crc kubenswrapper[4721]: I1121 13:52:08.822144 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24x44\" (UniqueName: \"kubernetes.io/projected/fae4c6a7-b36d-471d-8ecb-a10c5369cedd-kube-api-access-24x44\") pod \"mariadb-operator-index-t799j\" (UID: \"fae4c6a7-b36d-471d-8ecb-a10c5369cedd\") " pod="openstack-operators/mariadb-operator-index-t799j" Nov 21 13:52:08 crc kubenswrapper[4721]: I1121 13:52:08.923350 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24x44\" (UniqueName: \"kubernetes.io/projected/fae4c6a7-b36d-471d-8ecb-a10c5369cedd-kube-api-access-24x44\") pod \"mariadb-operator-index-t799j\" (UID: \"fae4c6a7-b36d-471d-8ecb-a10c5369cedd\") " pod="openstack-operators/mariadb-operator-index-t799j" Nov 21 13:52:08 crc kubenswrapper[4721]: I1121 13:52:08.942420 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24x44\" (UniqueName: \"kubernetes.io/projected/fae4c6a7-b36d-471d-8ecb-a10c5369cedd-kube-api-access-24x44\") pod \"mariadb-operator-index-t799j\" (UID: \"fae4c6a7-b36d-471d-8ecb-a10c5369cedd\") " pod="openstack-operators/mariadb-operator-index-t799j" Nov 21 13:52:09 crc kubenswrapper[4721]: I1121 13:52:09.047612 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-t799j" Nov 21 13:52:09 crc kubenswrapper[4721]: I1121 13:52:09.495921 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-t799j"] Nov 21 13:52:09 crc kubenswrapper[4721]: W1121 13:52:09.500039 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfae4c6a7_b36d_471d_8ecb_a10c5369cedd.slice/crio-83317a259b74ec0a4d091acb6bed85b287f6d0e17c484047e8be169c2c585f30 WatchSource:0}: Error finding container 83317a259b74ec0a4d091acb6bed85b287f6d0e17c484047e8be169c2c585f30: Status 404 returned error can't find the container with id 83317a259b74ec0a4d091acb6bed85b287f6d0e17c484047e8be169c2c585f30 Nov 21 13:52:09 crc kubenswrapper[4721]: I1121 13:52:09.763355 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-t799j" event={"ID":"fae4c6a7-b36d-471d-8ecb-a10c5369cedd","Type":"ContainerStarted","Data":"83317a259b74ec0a4d091acb6bed85b287f6d0e17c484047e8be169c2c585f30"} Nov 21 13:52:10 crc kubenswrapper[4721]: I1121 13:52:10.512482 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-sfbbq" Nov 21 13:52:10 crc kubenswrapper[4721]: I1121 13:52:10.543604 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-6998585d5-8ft7x" Nov 21 13:52:10 crc kubenswrapper[4721]: I1121 13:52:10.623880 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6c7b4b5f48-72qg5" Nov 21 13:52:12 crc kubenswrapper[4721]: I1121 13:52:12.505248 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-t799j"] Nov 21 13:52:13 crc kubenswrapper[4721]: I1121 13:52:13.109757 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-g7mgz"] Nov 21 13:52:13 crc kubenswrapper[4721]: I1121 13:52:13.110658 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-g7mgz" Nov 21 13:52:13 crc kubenswrapper[4721]: I1121 13:52:13.118173 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-g7mgz"] Nov 21 13:52:13 crc kubenswrapper[4721]: I1121 13:52:13.291733 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v874x\" (UniqueName: \"kubernetes.io/projected/3b5273ec-581b-4d8c-a334-d063b3bef2e7-kube-api-access-v874x\") pod \"mariadb-operator-index-g7mgz\" (UID: \"3b5273ec-581b-4d8c-a334-d063b3bef2e7\") " pod="openstack-operators/mariadb-operator-index-g7mgz" Nov 21 13:52:13 crc kubenswrapper[4721]: I1121 13:52:13.392865 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v874x\" (UniqueName: \"kubernetes.io/projected/3b5273ec-581b-4d8c-a334-d063b3bef2e7-kube-api-access-v874x\") pod \"mariadb-operator-index-g7mgz\" (UID: \"3b5273ec-581b-4d8c-a334-d063b3bef2e7\") " pod="openstack-operators/mariadb-operator-index-g7mgz" Nov 21 13:52:13 crc kubenswrapper[4721]: I1121 13:52:13.417325 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v874x\" (UniqueName: \"kubernetes.io/projected/3b5273ec-581b-4d8c-a334-d063b3bef2e7-kube-api-access-v874x\") pod \"mariadb-operator-index-g7mgz\" (UID: \"3b5273ec-581b-4d8c-a334-d063b3bef2e7\") " pod="openstack-operators/mariadb-operator-index-g7mgz" Nov 21 13:52:13 crc kubenswrapper[4721]: I1121 13:52:13.427031 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-g7mgz" Nov 21 13:52:17 crc kubenswrapper[4721]: I1121 13:52:17.500608 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-g7mgz"] Nov 21 13:52:17 crc kubenswrapper[4721]: W1121 13:52:17.513340 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b5273ec_581b_4d8c_a334_d063b3bef2e7.slice/crio-3dde41ec970df7765261ccdfe7be31af5620ec82205f53714c8c2be764e20799 WatchSource:0}: Error finding container 3dde41ec970df7765261ccdfe7be31af5620ec82205f53714c8c2be764e20799: Status 404 returned error can't find the container with id 3dde41ec970df7765261ccdfe7be31af5620ec82205f53714c8c2be764e20799 Nov 21 13:52:17 crc kubenswrapper[4721]: I1121 13:52:17.810308 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-g7mgz" event={"ID":"3b5273ec-581b-4d8c-a334-d063b3bef2e7","Type":"ContainerStarted","Data":"3dde41ec970df7765261ccdfe7be31af5620ec82205f53714c8c2be764e20799"} Nov 21 13:52:18 crc kubenswrapper[4721]: I1121 13:52:18.816491 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-t799j" event={"ID":"fae4c6a7-b36d-471d-8ecb-a10c5369cedd","Type":"ContainerStarted","Data":"d728f279338acc0b16d4dd04b41ddf26624152201dbe6e10adef2f31dd02d488"} Nov 21 13:52:18 crc kubenswrapper[4721]: I1121 13:52:18.816571 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-index-t799j" podUID="fae4c6a7-b36d-471d-8ecb-a10c5369cedd" containerName="registry-server" containerID="cri-o://d728f279338acc0b16d4dd04b41ddf26624152201dbe6e10adef2f31dd02d488" gracePeriod=2 Nov 21 13:52:18 crc kubenswrapper[4721]: I1121 13:52:18.819610 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/mariadb-operator-index-g7mgz" event={"ID":"3b5273ec-581b-4d8c-a334-d063b3bef2e7","Type":"ContainerStarted","Data":"58b1734616a752185fa97231a573ce852378150b1cb4e0b749ead63576eddaf9"} Nov 21 13:52:18 crc kubenswrapper[4721]: I1121 13:52:18.837150 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-t799j" podStartSLOduration=2.279325698 podStartE2EDuration="10.837134155s" podCreationTimestamp="2025-11-21 13:52:08 +0000 UTC" firstStartedPulling="2025-11-21 13:52:09.502323327 +0000 UTC m=+967.480214585" lastFinishedPulling="2025-11-21 13:52:18.060131764 +0000 UTC m=+976.038023042" observedRunningTime="2025-11-21 13:52:18.832220995 +0000 UTC m=+976.810112273" watchObservedRunningTime="2025-11-21 13:52:18.837134155 +0000 UTC m=+976.815025413" Nov 21 13:52:18 crc kubenswrapper[4721]: I1121 13:52:18.845486 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-g7mgz" podStartSLOduration=5.147919277 podStartE2EDuration="5.845466236s" podCreationTimestamp="2025-11-21 13:52:13 +0000 UTC" firstStartedPulling="2025-11-21 13:52:17.515379308 +0000 UTC m=+975.493270566" lastFinishedPulling="2025-11-21 13:52:18.212926267 +0000 UTC m=+976.190817525" observedRunningTime="2025-11-21 13:52:18.844618303 +0000 UTC m=+976.822509571" watchObservedRunningTime="2025-11-21 13:52:18.845466236 +0000 UTC m=+976.823357494" Nov 21 13:52:19 crc kubenswrapper[4721]: I1121 13:52:19.048575 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-index-t799j" Nov 21 13:52:19 crc kubenswrapper[4721]: I1121 13:52:19.578632 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-t799j" Nov 21 13:52:19 crc kubenswrapper[4721]: I1121 13:52:19.677954 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-24x44\" (UniqueName: \"kubernetes.io/projected/fae4c6a7-b36d-471d-8ecb-a10c5369cedd-kube-api-access-24x44\") pod \"fae4c6a7-b36d-471d-8ecb-a10c5369cedd\" (UID: \"fae4c6a7-b36d-471d-8ecb-a10c5369cedd\") " Nov 21 13:52:19 crc kubenswrapper[4721]: I1121 13:52:19.683553 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fae4c6a7-b36d-471d-8ecb-a10c5369cedd-kube-api-access-24x44" (OuterVolumeSpecName: "kube-api-access-24x44") pod "fae4c6a7-b36d-471d-8ecb-a10c5369cedd" (UID: "fae4c6a7-b36d-471d-8ecb-a10c5369cedd"). InnerVolumeSpecName "kube-api-access-24x44". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:52:19 crc kubenswrapper[4721]: I1121 13:52:19.779954 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-24x44\" (UniqueName: \"kubernetes.io/projected/fae4c6a7-b36d-471d-8ecb-a10c5369cedd-kube-api-access-24x44\") on node \"crc\" DevicePath \"\"" Nov 21 13:52:19 crc kubenswrapper[4721]: I1121 13:52:19.826612 4721 generic.go:334] "Generic (PLEG): container finished" podID="fae4c6a7-b36d-471d-8ecb-a10c5369cedd" containerID="d728f279338acc0b16d4dd04b41ddf26624152201dbe6e10adef2f31dd02d488" exitCode=0 Nov 21 13:52:19 crc kubenswrapper[4721]: I1121 13:52:19.826677 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-t799j" Nov 21 13:52:19 crc kubenswrapper[4721]: I1121 13:52:19.826689 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-t799j" event={"ID":"fae4c6a7-b36d-471d-8ecb-a10c5369cedd","Type":"ContainerDied","Data":"d728f279338acc0b16d4dd04b41ddf26624152201dbe6e10adef2f31dd02d488"} Nov 21 13:52:19 crc kubenswrapper[4721]: I1121 13:52:19.826757 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-t799j" event={"ID":"fae4c6a7-b36d-471d-8ecb-a10c5369cedd","Type":"ContainerDied","Data":"83317a259b74ec0a4d091acb6bed85b287f6d0e17c484047e8be169c2c585f30"} Nov 21 13:52:19 crc kubenswrapper[4721]: I1121 13:52:19.826780 4721 scope.go:117] "RemoveContainer" containerID="d728f279338acc0b16d4dd04b41ddf26624152201dbe6e10adef2f31dd02d488" Nov 21 13:52:19 crc kubenswrapper[4721]: I1121 13:52:19.850616 4721 scope.go:117] "RemoveContainer" containerID="d728f279338acc0b16d4dd04b41ddf26624152201dbe6e10adef2f31dd02d488" Nov 21 13:52:19 crc kubenswrapper[4721]: I1121 13:52:19.851890 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-t799j"] Nov 21 13:52:19 crc kubenswrapper[4721]: E1121 13:52:19.852528 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d728f279338acc0b16d4dd04b41ddf26624152201dbe6e10adef2f31dd02d488\": container with ID starting with d728f279338acc0b16d4dd04b41ddf26624152201dbe6e10adef2f31dd02d488 not found: ID does not exist" containerID="d728f279338acc0b16d4dd04b41ddf26624152201dbe6e10adef2f31dd02d488" Nov 21 13:52:19 crc kubenswrapper[4721]: I1121 13:52:19.852575 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d728f279338acc0b16d4dd04b41ddf26624152201dbe6e10adef2f31dd02d488"} err="failed to get container status \"d728f279338acc0b16d4dd04b41ddf26624152201dbe6e10adef2f31dd02d488\": rpc error: code = NotFound desc = could not find container \"d728f279338acc0b16d4dd04b41ddf26624152201dbe6e10adef2f31dd02d488\": container with ID starting with d728f279338acc0b16d4dd04b41ddf26624152201dbe6e10adef2f31dd02d488 not found: ID does not exist" Nov 21 13:52:19 crc kubenswrapper[4721]: I1121 13:52:19.854745 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-t799j"] Nov 21 13:52:20 crc kubenswrapper[4721]: I1121 13:52:20.995676 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fae4c6a7-b36d-471d-8ecb-a10c5369cedd" path="/var/lib/kubelet/pods/fae4c6a7-b36d-471d-8ecb-a10c5369cedd/volumes" Nov 21 13:52:23 crc kubenswrapper[4721]: I1121 13:52:23.427315 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-index-g7mgz" Nov 21 13:52:23 crc kubenswrapper[4721]: I1121 13:52:23.427537 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/mariadb-operator-index-g7mgz" Nov 21 13:52:23 crc kubenswrapper[4721]: I1121 13:52:23.455011 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/mariadb-operator-index-g7mgz" Nov 21 13:52:23 crc kubenswrapper[4721]: I1121 13:52:23.881500 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-index-g7mgz" Nov 21 13:52:28 crc kubenswrapper[4721]: I1121 
13:52:28.955145 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87"] Nov 21 13:52:28 crc kubenswrapper[4721]: E1121 13:52:28.955865 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fae4c6a7-b36d-471d-8ecb-a10c5369cedd" containerName="registry-server" Nov 21 13:52:28 crc kubenswrapper[4721]: I1121 13:52:28.955878 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="fae4c6a7-b36d-471d-8ecb-a10c5369cedd" containerName="registry-server" Nov 21 13:52:28 crc kubenswrapper[4721]: I1121 13:52:28.955974 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="fae4c6a7-b36d-471d-8ecb-a10c5369cedd" containerName="registry-server" Nov 21 13:52:28 crc kubenswrapper[4721]: I1121 13:52:28.956701 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87" Nov 21 13:52:28 crc kubenswrapper[4721]: I1121 13:52:28.963593 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-tw86c" Nov 21 13:52:28 crc kubenswrapper[4721]: I1121 13:52:28.971213 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87"] Nov 21 13:52:29 crc kubenswrapper[4721]: I1121 13:52:29.110690 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jgmf\" (UniqueName: \"kubernetes.io/projected/4316bbef-eb0e-42b5-b59a-1d4dc304aa78-kube-api-access-6jgmf\") pod \"7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87\" (UID: \"4316bbef-eb0e-42b5-b59a-1d4dc304aa78\") " pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87" Nov 21 13:52:29 crc kubenswrapper[4721]: I1121 13:52:29.110792 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4316bbef-eb0e-42b5-b59a-1d4dc304aa78-util\") pod \"7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87\" (UID: \"4316bbef-eb0e-42b5-b59a-1d4dc304aa78\") " pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87" Nov 21 13:52:29 crc kubenswrapper[4721]: I1121 13:52:29.110834 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4316bbef-eb0e-42b5-b59a-1d4dc304aa78-bundle\") pod \"7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87\" (UID: \"4316bbef-eb0e-42b5-b59a-1d4dc304aa78\") " pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87" Nov 21 13:52:29 crc kubenswrapper[4721]: I1121 13:52:29.211743 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jgmf\" (UniqueName: \"kubernetes.io/projected/4316bbef-eb0e-42b5-b59a-1d4dc304aa78-kube-api-access-6jgmf\") pod \"7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87\" (UID: \"4316bbef-eb0e-42b5-b59a-1d4dc304aa78\") " pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87" Nov 21 13:52:29 crc kubenswrapper[4721]: I1121 13:52:29.211830 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4316bbef-eb0e-42b5-b59a-1d4dc304aa78-util\") pod 
\"7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87\" (UID: \"4316bbef-eb0e-42b5-b59a-1d4dc304aa78\") " pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87" Nov 21 13:52:29 crc kubenswrapper[4721]: I1121 13:52:29.211871 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4316bbef-eb0e-42b5-b59a-1d4dc304aa78-bundle\") pod \"7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87\" (UID: \"4316bbef-eb0e-42b5-b59a-1d4dc304aa78\") " pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87" Nov 21 13:52:29 crc kubenswrapper[4721]: I1121 13:52:29.212381 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4316bbef-eb0e-42b5-b59a-1d4dc304aa78-util\") pod \"7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87\" (UID: \"4316bbef-eb0e-42b5-b59a-1d4dc304aa78\") " pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87" Nov 21 13:52:29 crc kubenswrapper[4721]: I1121 13:52:29.212642 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4316bbef-eb0e-42b5-b59a-1d4dc304aa78-bundle\") pod \"7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87\" (UID: \"4316bbef-eb0e-42b5-b59a-1d4dc304aa78\") " pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87" Nov 21 13:52:29 crc kubenswrapper[4721]: I1121 13:52:29.241245 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jgmf\" (UniqueName: \"kubernetes.io/projected/4316bbef-eb0e-42b5-b59a-1d4dc304aa78-kube-api-access-6jgmf\") pod \"7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87\" (UID: \"4316bbef-eb0e-42b5-b59a-1d4dc304aa78\") " pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87" Nov 21 13:52:29 crc kubenswrapper[4721]: I1121 13:52:29.274246 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87" Nov 21 13:52:29 crc kubenswrapper[4721]: I1121 13:52:29.688870 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87"] Nov 21 13:52:29 crc kubenswrapper[4721]: I1121 13:52:29.901603 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87" event={"ID":"4316bbef-eb0e-42b5-b59a-1d4dc304aa78","Type":"ContainerStarted","Data":"ab86381d9df21f031bd34367d306a8ef49b4e6a43be7e0b745fb6bf91a0ad27c"} Nov 21 13:52:29 crc kubenswrapper[4721]: I1121 13:52:29.901957 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87" event={"ID":"4316bbef-eb0e-42b5-b59a-1d4dc304aa78","Type":"ContainerStarted","Data":"84286743707e2a3e7a40b2d90fa76d077732e11598f6f17bc1acb3ea79c9f5b3"} Nov 21 13:52:30 crc kubenswrapper[4721]: I1121 13:52:30.909942 4721 generic.go:334] "Generic (PLEG): container finished" podID="4316bbef-eb0e-42b5-b59a-1d4dc304aa78" containerID="ab86381d9df21f031bd34367d306a8ef49b4e6a43be7e0b745fb6bf91a0ad27c" exitCode=0 Nov 21 13:52:30 crc kubenswrapper[4721]: I1121 13:52:30.909987 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87" event={"ID":"4316bbef-eb0e-42b5-b59a-1d4dc304aa78","Type":"ContainerDied","Data":"ab86381d9df21f031bd34367d306a8ef49b4e6a43be7e0b745fb6bf91a0ad27c"} Nov 21 13:52:34 crc kubenswrapper[4721]: I1121 13:52:34.939049 4721 generic.go:334] "Generic (PLEG): container finished" podID="4316bbef-eb0e-42b5-b59a-1d4dc304aa78" containerID="dd09c2f3aa98014287a5ad6ff7d43263ca357b6d25fb2aa581cc968949e16d0d" exitCode=0 Nov 21 13:52:34 crc kubenswrapper[4721]: I1121 13:52:34.939286 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87" event={"ID":"4316bbef-eb0e-42b5-b59a-1d4dc304aa78","Type":"ContainerDied","Data":"dd09c2f3aa98014287a5ad6ff7d43263ca357b6d25fb2aa581cc968949e16d0d"} Nov 21 13:52:35 crc kubenswrapper[4721]: I1121 13:52:35.261863 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 13:52:35 crc kubenswrapper[4721]: I1121 13:52:35.261960 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 13:52:35 crc kubenswrapper[4721]: I1121 13:52:35.949413 4721 generic.go:334] "Generic (PLEG): container finished" podID="4316bbef-eb0e-42b5-b59a-1d4dc304aa78" containerID="3bc479d815395e1335f5e0f88e5a1efb03fb2b904ea74707ba57a4b756ce29c1" exitCode=0 Nov 21 13:52:35 crc kubenswrapper[4721]: I1121 13:52:35.949483 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87" 
event={"ID":"4316bbef-eb0e-42b5-b59a-1d4dc304aa78","Type":"ContainerDied","Data":"3bc479d815395e1335f5e0f88e5a1efb03fb2b904ea74707ba57a4b756ce29c1"} Nov 21 13:52:37 crc kubenswrapper[4721]: I1121 13:52:37.315550 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87" Nov 21 13:52:37 crc kubenswrapper[4721]: I1121 13:52:37.426089 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4316bbef-eb0e-42b5-b59a-1d4dc304aa78-bundle\") pod \"4316bbef-eb0e-42b5-b59a-1d4dc304aa78\" (UID: \"4316bbef-eb0e-42b5-b59a-1d4dc304aa78\") " Nov 21 13:52:37 crc kubenswrapper[4721]: I1121 13:52:37.426181 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jgmf\" (UniqueName: \"kubernetes.io/projected/4316bbef-eb0e-42b5-b59a-1d4dc304aa78-kube-api-access-6jgmf\") pod \"4316bbef-eb0e-42b5-b59a-1d4dc304aa78\" (UID: \"4316bbef-eb0e-42b5-b59a-1d4dc304aa78\") " Nov 21 13:52:37 crc kubenswrapper[4721]: I1121 13:52:37.426208 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4316bbef-eb0e-42b5-b59a-1d4dc304aa78-util\") pod \"4316bbef-eb0e-42b5-b59a-1d4dc304aa78\" (UID: \"4316bbef-eb0e-42b5-b59a-1d4dc304aa78\") " Nov 21 13:52:37 crc kubenswrapper[4721]: I1121 13:52:37.427120 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4316bbef-eb0e-42b5-b59a-1d4dc304aa78-bundle" (OuterVolumeSpecName: "bundle") pod "4316bbef-eb0e-42b5-b59a-1d4dc304aa78" (UID: "4316bbef-eb0e-42b5-b59a-1d4dc304aa78"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:52:37 crc kubenswrapper[4721]: I1121 13:52:37.431283 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4316bbef-eb0e-42b5-b59a-1d4dc304aa78-kube-api-access-6jgmf" (OuterVolumeSpecName: "kube-api-access-6jgmf") pod "4316bbef-eb0e-42b5-b59a-1d4dc304aa78" (UID: "4316bbef-eb0e-42b5-b59a-1d4dc304aa78"). InnerVolumeSpecName "kube-api-access-6jgmf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:52:37 crc kubenswrapper[4721]: I1121 13:52:37.436458 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4316bbef-eb0e-42b5-b59a-1d4dc304aa78-util" (OuterVolumeSpecName: "util") pod "4316bbef-eb0e-42b5-b59a-1d4dc304aa78" (UID: "4316bbef-eb0e-42b5-b59a-1d4dc304aa78"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:52:37 crc kubenswrapper[4721]: I1121 13:52:37.528041 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jgmf\" (UniqueName: \"kubernetes.io/projected/4316bbef-eb0e-42b5-b59a-1d4dc304aa78-kube-api-access-6jgmf\") on node \"crc\" DevicePath \"\"" Nov 21 13:52:37 crc kubenswrapper[4721]: I1121 13:52:37.528091 4721 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4316bbef-eb0e-42b5-b59a-1d4dc304aa78-util\") on node \"crc\" DevicePath \"\"" Nov 21 13:52:37 crc kubenswrapper[4721]: I1121 13:52:37.528101 4721 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4316bbef-eb0e-42b5-b59a-1d4dc304aa78-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 13:52:37 crc kubenswrapper[4721]: I1121 13:52:37.962528 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87" event={"ID":"4316bbef-eb0e-42b5-b59a-1d4dc304aa78","Type":"ContainerDied","Data":"84286743707e2a3e7a40b2d90fa76d077732e11598f6f17bc1acb3ea79c9f5b3"} Nov 21 13:52:37 crc kubenswrapper[4721]: I1121 13:52:37.962572 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84286743707e2a3e7a40b2d90fa76d077732e11598f6f17bc1acb3ea79c9f5b3" Nov 21 13:52:37 crc kubenswrapper[4721]: I1121 13:52:37.962578 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87" Nov 21 13:52:42 crc kubenswrapper[4721]: I1121 13:52:42.835083 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-d9b6869ff-qsxxl"] Nov 21 13:52:42 crc kubenswrapper[4721]: E1121 13:52:42.836580 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4316bbef-eb0e-42b5-b59a-1d4dc304aa78" containerName="extract" Nov 21 13:52:42 crc kubenswrapper[4721]: I1121 13:52:42.836676 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="4316bbef-eb0e-42b5-b59a-1d4dc304aa78" containerName="extract" Nov 21 13:52:42 crc kubenswrapper[4721]: E1121 13:52:42.836758 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4316bbef-eb0e-42b5-b59a-1d4dc304aa78" containerName="util" Nov 21 13:52:42 crc kubenswrapper[4721]: I1121 13:52:42.836830 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="4316bbef-eb0e-42b5-b59a-1d4dc304aa78" containerName="util" Nov 21 13:52:42 crc kubenswrapper[4721]: E1121 13:52:42.836905 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4316bbef-eb0e-42b5-b59a-1d4dc304aa78" containerName="pull" Nov 21 13:52:42 crc kubenswrapper[4721]: I1121 13:52:42.836971 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="4316bbef-eb0e-42b5-b59a-1d4dc304aa78" containerName="pull" Nov 21 13:52:42 crc kubenswrapper[4721]: I1121 13:52:42.837185 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="4316bbef-eb0e-42b5-b59a-1d4dc304aa78" containerName="extract" Nov 21 13:52:42 crc kubenswrapper[4721]: I1121 13:52:42.837992 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-d9b6869ff-qsxxl" Nov 21 13:52:42 crc kubenswrapper[4721]: I1121 13:52:42.840499 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Nov 21 13:52:42 crc kubenswrapper[4721]: I1121 13:52:42.840513 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-sckt8" Nov 21 13:52:42 crc kubenswrapper[4721]: I1121 13:52:42.840623 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-service-cert" Nov 21 13:52:42 crc kubenswrapper[4721]: I1121 13:52:42.859929 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-d9b6869ff-qsxxl"] Nov 21 13:52:43 crc kubenswrapper[4721]: I1121 13:52:43.013160 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3dbd2cda-dd31-423b-9510-c6afd1fc82f9-apiservice-cert\") pod \"mariadb-operator-controller-manager-d9b6869ff-qsxxl\" (UID: \"3dbd2cda-dd31-423b-9510-c6afd1fc82f9\") " pod="openstack-operators/mariadb-operator-controller-manager-d9b6869ff-qsxxl" Nov 21 13:52:43 crc kubenswrapper[4721]: I1121 13:52:43.013842 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqz8t\" (UniqueName: \"kubernetes.io/projected/3dbd2cda-dd31-423b-9510-c6afd1fc82f9-kube-api-access-nqz8t\") pod \"mariadb-operator-controller-manager-d9b6869ff-qsxxl\" (UID: \"3dbd2cda-dd31-423b-9510-c6afd1fc82f9\") " pod="openstack-operators/mariadb-operator-controller-manager-d9b6869ff-qsxxl" Nov 21 13:52:43 crc kubenswrapper[4721]: I1121 13:52:43.014288 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3dbd2cda-dd31-423b-9510-c6afd1fc82f9-webhook-cert\") pod \"mariadb-operator-controller-manager-d9b6869ff-qsxxl\" (UID: \"3dbd2cda-dd31-423b-9510-c6afd1fc82f9\") " pod="openstack-operators/mariadb-operator-controller-manager-d9b6869ff-qsxxl" Nov 21 13:52:43 crc kubenswrapper[4721]: I1121 13:52:43.116036 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3dbd2cda-dd31-423b-9510-c6afd1fc82f9-webhook-cert\") pod \"mariadb-operator-controller-manager-d9b6869ff-qsxxl\" (UID: \"3dbd2cda-dd31-423b-9510-c6afd1fc82f9\") " pod="openstack-operators/mariadb-operator-controller-manager-d9b6869ff-qsxxl" Nov 21 13:52:43 crc kubenswrapper[4721]: I1121 13:52:43.116123 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3dbd2cda-dd31-423b-9510-c6afd1fc82f9-apiservice-cert\") pod \"mariadb-operator-controller-manager-d9b6869ff-qsxxl\" (UID: \"3dbd2cda-dd31-423b-9510-c6afd1fc82f9\") " pod="openstack-operators/mariadb-operator-controller-manager-d9b6869ff-qsxxl" Nov 21 13:52:43 crc kubenswrapper[4721]: I1121 13:52:43.116144 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqz8t\" (UniqueName: \"kubernetes.io/projected/3dbd2cda-dd31-423b-9510-c6afd1fc82f9-kube-api-access-nqz8t\") pod \"mariadb-operator-controller-manager-d9b6869ff-qsxxl\" (UID: \"3dbd2cda-dd31-423b-9510-c6afd1fc82f9\") " 
pod="openstack-operators/mariadb-operator-controller-manager-d9b6869ff-qsxxl" Nov 21 13:52:43 crc kubenswrapper[4721]: I1121 13:52:43.122479 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3dbd2cda-dd31-423b-9510-c6afd1fc82f9-webhook-cert\") pod \"mariadb-operator-controller-manager-d9b6869ff-qsxxl\" (UID: \"3dbd2cda-dd31-423b-9510-c6afd1fc82f9\") " pod="openstack-operators/mariadb-operator-controller-manager-d9b6869ff-qsxxl" Nov 21 13:52:43 crc kubenswrapper[4721]: I1121 13:52:43.133524 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3dbd2cda-dd31-423b-9510-c6afd1fc82f9-apiservice-cert\") pod \"mariadb-operator-controller-manager-d9b6869ff-qsxxl\" (UID: \"3dbd2cda-dd31-423b-9510-c6afd1fc82f9\") " pod="openstack-operators/mariadb-operator-controller-manager-d9b6869ff-qsxxl" Nov 21 13:52:43 crc kubenswrapper[4721]: I1121 13:52:43.137540 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqz8t\" (UniqueName: \"kubernetes.io/projected/3dbd2cda-dd31-423b-9510-c6afd1fc82f9-kube-api-access-nqz8t\") pod \"mariadb-operator-controller-manager-d9b6869ff-qsxxl\" (UID: \"3dbd2cda-dd31-423b-9510-c6afd1fc82f9\") " pod="openstack-operators/mariadb-operator-controller-manager-d9b6869ff-qsxxl" Nov 21 13:52:43 crc kubenswrapper[4721]: I1121 13:52:43.159617 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-d9b6869ff-qsxxl" Nov 21 13:52:43 crc kubenswrapper[4721]: I1121 13:52:43.408024 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-d9b6869ff-qsxxl"] Nov 21 13:52:44 crc kubenswrapper[4721]: I1121 13:52:44.000571 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-d9b6869ff-qsxxl" event={"ID":"3dbd2cda-dd31-423b-9510-c6afd1fc82f9","Type":"ContainerStarted","Data":"7649a3994ef81f3dd5ee7ffbe425c3512284410bb885515dd8fc21623886684d"} Nov 21 13:52:53 crc kubenswrapper[4721]: I1121 13:52:53.053207 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-d9b6869ff-qsxxl" event={"ID":"3dbd2cda-dd31-423b-9510-c6afd1fc82f9","Type":"ContainerStarted","Data":"1be90daef90c89126e52d4acbea77681f5bd543bf80d5e40524d1415c96e0ba6"} Nov 21 13:53:05 crc kubenswrapper[4721]: I1121 13:53:05.262623 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 13:53:05 crc kubenswrapper[4721]: I1121 13:53:05.263183 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 13:53:05 crc kubenswrapper[4721]: I1121 13:53:05.263237 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" Nov 21 13:53:05 crc kubenswrapper[4721]: I1121 13:53:05.263894 4721 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"71a86cc2f09fdb3301718c24b412995cd6f1788d273d3c5f0145c7b6b096533f"} pod="openshift-machine-config-operator/machine-config-daemon-mstbk" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 13:53:05 crc kubenswrapper[4721]: I1121 13:53:05.263951 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" containerID="cri-o://71a86cc2f09fdb3301718c24b412995cd6f1788d273d3c5f0145c7b6b096533f" gracePeriod=600 Nov 21 13:53:06 crc kubenswrapper[4721]: I1121 13:53:06.142146 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-d9b6869ff-qsxxl" event={"ID":"3dbd2cda-dd31-423b-9510-c6afd1fc82f9","Type":"ContainerStarted","Data":"3457663cc86d3388a1d6d4dab2ded889ff3f9326ab4c85242c2de2c3ce301368"} Nov 21 13:53:06 crc kubenswrapper[4721]: I1121 13:53:06.142920 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-d9b6869ff-qsxxl" Nov 21 13:53:06 crc kubenswrapper[4721]: I1121 13:53:06.145132 4721 generic.go:334] "Generic (PLEG): container finished" podID="fe3943c5-1349-4275-bc45-25db418c7488" containerID="71a86cc2f09fdb3301718c24b412995cd6f1788d273d3c5f0145c7b6b096533f" exitCode=0 Nov 21 13:53:06 crc kubenswrapper[4721]: I1121 13:53:06.145138 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" event={"ID":"fe3943c5-1349-4275-bc45-25db418c7488","Type":"ContainerDied","Data":"71a86cc2f09fdb3301718c24b412995cd6f1788d273d3c5f0145c7b6b096533f"} Nov 21 13:53:06 crc kubenswrapper[4721]: I1121 13:53:06.145221 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" event={"ID":"fe3943c5-1349-4275-bc45-25db418c7488","Type":"ContainerStarted","Data":"35f8b67009daee90d3db515f042a09a64d87fb15b884ad370362a42ff402804a"} Nov 21 13:53:06 crc kubenswrapper[4721]: I1121 13:53:06.145253 4721 scope.go:117] "RemoveContainer" containerID="3c25faaf13321f5c86af730ca041b03706f375a923a1b990e773cef7516bc285" Nov 21 13:53:06 crc kubenswrapper[4721]: I1121 13:53:06.147694 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-d9b6869ff-qsxxl" Nov 21 13:53:06 crc kubenswrapper[4721]: I1121 13:53:06.163115 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-d9b6869ff-qsxxl" podStartSLOduration=2.531456429 podStartE2EDuration="24.163094495s" podCreationTimestamp="2025-11-21 13:52:42 +0000 UTC" firstStartedPulling="2025-11-21 13:52:43.419142305 +0000 UTC m=+1001.397033583" lastFinishedPulling="2025-11-21 13:53:05.050780351 +0000 UTC m=+1023.028671649" observedRunningTime="2025-11-21 13:53:06.162366936 +0000 UTC m=+1024.140258244" watchObservedRunningTime="2025-11-21 13:53:06.163094495 +0000 UTC m=+1024.140985803" Nov 21 13:53:08 crc kubenswrapper[4721]: I1121 13:53:08.111460 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-dngnb"] Nov 21 13:53:08 crc kubenswrapper[4721]: I1121 13:53:08.112646 4721 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-dngnb" Nov 21 13:53:08 crc kubenswrapper[4721]: I1121 13:53:08.115007 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-index-dockercfg-2dtk8" Nov 21 13:53:08 crc kubenswrapper[4721]: I1121 13:53:08.133795 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-dngnb"] Nov 21 13:53:08 crc kubenswrapper[4721]: I1121 13:53:08.186495 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmth2\" (UniqueName: \"kubernetes.io/projected/228f7da2-c323-4417-a54b-7a6af230dd50-kube-api-access-zmth2\") pod \"infra-operator-index-dngnb\" (UID: \"228f7da2-c323-4417-a54b-7a6af230dd50\") " pod="openstack-operators/infra-operator-index-dngnb" Nov 21 13:53:08 crc kubenswrapper[4721]: I1121 13:53:08.288198 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmth2\" (UniqueName: \"kubernetes.io/projected/228f7da2-c323-4417-a54b-7a6af230dd50-kube-api-access-zmth2\") pod \"infra-operator-index-dngnb\" (UID: \"228f7da2-c323-4417-a54b-7a6af230dd50\") " pod="openstack-operators/infra-operator-index-dngnb" Nov 21 13:53:08 crc kubenswrapper[4721]: I1121 13:53:08.315310 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmth2\" (UniqueName: \"kubernetes.io/projected/228f7da2-c323-4417-a54b-7a6af230dd50-kube-api-access-zmth2\") pod \"infra-operator-index-dngnb\" (UID: \"228f7da2-c323-4417-a54b-7a6af230dd50\") " pod="openstack-operators/infra-operator-index-dngnb" Nov 21 13:53:08 crc kubenswrapper[4721]: I1121 13:53:08.429725 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-dngnb" Nov 21 13:53:08 crc kubenswrapper[4721]: I1121 13:53:08.663675 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-dngnb"] Nov 21 13:53:09 crc kubenswrapper[4721]: I1121 13:53:09.169706 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-dngnb" event={"ID":"228f7da2-c323-4417-a54b-7a6af230dd50","Type":"ContainerStarted","Data":"924f78265849fa0b342e4a83cfb753cb9b6e1c8d48bcc9f93e3e8949e416552d"} Nov 21 13:53:11 crc kubenswrapper[4721]: I1121 13:53:11.301630 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-dngnb"] Nov 21 13:53:11 crc kubenswrapper[4721]: I1121 13:53:11.913200 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-4zsjc"] Nov 21 13:53:11 crc kubenswrapper[4721]: I1121 13:53:11.914900 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-4zsjc" Nov 21 13:53:11 crc kubenswrapper[4721]: I1121 13:53:11.922047 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-4zsjc"] Nov 21 13:53:12 crc kubenswrapper[4721]: I1121 13:53:12.042257 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcjvc\" (UniqueName: \"kubernetes.io/projected/c90b21c7-7bc7-4c0d-9893-63f8d9d784fc-kube-api-access-vcjvc\") pod \"infra-operator-index-4zsjc\" (UID: \"c90b21c7-7bc7-4c0d-9893-63f8d9d784fc\") " pod="openstack-operators/infra-operator-index-4zsjc" Nov 21 13:53:12 crc kubenswrapper[4721]: I1121 13:53:12.143716 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcjvc\" (UniqueName: \"kubernetes.io/projected/c90b21c7-7bc7-4c0d-9893-63f8d9d784fc-kube-api-access-vcjvc\") pod \"infra-operator-index-4zsjc\" (UID: \"c90b21c7-7bc7-4c0d-9893-63f8d9d784fc\") " pod="openstack-operators/infra-operator-index-4zsjc" Nov 21 13:53:12 crc kubenswrapper[4721]: I1121 13:53:12.184852 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcjvc\" (UniqueName: \"kubernetes.io/projected/c90b21c7-7bc7-4c0d-9893-63f8d9d784fc-kube-api-access-vcjvc\") pod \"infra-operator-index-4zsjc\" (UID: \"c90b21c7-7bc7-4c0d-9893-63f8d9d784fc\") " pod="openstack-operators/infra-operator-index-4zsjc" Nov 21 13:53:12 crc kubenswrapper[4721]: I1121 13:53:12.196805 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-dngnb" event={"ID":"228f7da2-c323-4417-a54b-7a6af230dd50","Type":"ContainerStarted","Data":"2996efa67b4f6ad20df97674d01076021b3c1b5a206f5fe01d4f7631b9f0bbbb"} Nov 21 13:53:12 crc kubenswrapper[4721]: I1121 13:53:12.196935 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-index-dngnb" podUID="228f7da2-c323-4417-a54b-7a6af230dd50" containerName="registry-server" containerID="cri-o://2996efa67b4f6ad20df97674d01076021b3c1b5a206f5fe01d4f7631b9f0bbbb" gracePeriod=2 Nov 21 13:53:12 crc kubenswrapper[4721]: I1121 13:53:12.217239 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-dngnb" podStartSLOduration=1.8476728599999999 podStartE2EDuration="4.217220013s" podCreationTimestamp="2025-11-21 13:53:08 +0000 UTC" firstStartedPulling="2025-11-21 13:53:08.677822682 +0000 UTC m=+1026.655713940" lastFinishedPulling="2025-11-21 13:53:11.047369815 +0000 UTC m=+1029.025261093" observedRunningTime="2025-11-21 13:53:12.216504124 +0000 UTC m=+1030.194395392" watchObservedRunningTime="2025-11-21 13:53:12.217220013 +0000 UTC m=+1030.195111271" Nov 21 13:53:12 crc kubenswrapper[4721]: I1121 13:53:12.234226 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-4zsjc" Nov 21 13:53:12 crc kubenswrapper[4721]: I1121 13:53:12.648309 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-4zsjc"] Nov 21 13:53:13 crc kubenswrapper[4721]: I1121 13:53:13.117747 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-dngnb" Nov 21 13:53:13 crc kubenswrapper[4721]: I1121 13:53:13.204443 4721 generic.go:334] "Generic (PLEG): container finished" podID="228f7da2-c323-4417-a54b-7a6af230dd50" containerID="2996efa67b4f6ad20df97674d01076021b3c1b5a206f5fe01d4f7631b9f0bbbb" exitCode=0 Nov 21 13:53:13 crc kubenswrapper[4721]: I1121 13:53:13.204567 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-dngnb" event={"ID":"228f7da2-c323-4417-a54b-7a6af230dd50","Type":"ContainerDied","Data":"2996efa67b4f6ad20df97674d01076021b3c1b5a206f5fe01d4f7631b9f0bbbb"} Nov 21 13:53:13 crc kubenswrapper[4721]: I1121 13:53:13.205103 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-dngnb" event={"ID":"228f7da2-c323-4417-a54b-7a6af230dd50","Type":"ContainerDied","Data":"924f78265849fa0b342e4a83cfb753cb9b6e1c8d48bcc9f93e3e8949e416552d"} Nov 21 13:53:13 crc kubenswrapper[4721]: I1121 13:53:13.204725 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-dngnb" Nov 21 13:53:13 crc kubenswrapper[4721]: I1121 13:53:13.205151 4721 scope.go:117] "RemoveContainer" containerID="2996efa67b4f6ad20df97674d01076021b3c1b5a206f5fe01d4f7631b9f0bbbb" Nov 21 13:53:13 crc kubenswrapper[4721]: I1121 13:53:13.206675 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-4zsjc" event={"ID":"c90b21c7-7bc7-4c0d-9893-63f8d9d784fc","Type":"ContainerStarted","Data":"3a3a8bec3f097fbb1b66fb13cceb61b7a7f13bffbae91c250198551fcfeb52aa"} Nov 21 13:53:13 crc kubenswrapper[4721]: I1121 13:53:13.220807 4721 scope.go:117] "RemoveContainer" containerID="2996efa67b4f6ad20df97674d01076021b3c1b5a206f5fe01d4f7631b9f0bbbb" Nov 21 13:53:13 crc kubenswrapper[4721]: E1121 13:53:13.221360 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2996efa67b4f6ad20df97674d01076021b3c1b5a206f5fe01d4f7631b9f0bbbb\": container with ID starting with 2996efa67b4f6ad20df97674d01076021b3c1b5a206f5fe01d4f7631b9f0bbbb not found: ID does not exist" containerID="2996efa67b4f6ad20df97674d01076021b3c1b5a206f5fe01d4f7631b9f0bbbb" Nov 21 13:53:13 crc kubenswrapper[4721]: I1121 13:53:13.221414 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2996efa67b4f6ad20df97674d01076021b3c1b5a206f5fe01d4f7631b9f0bbbb"} err="failed to get container status \"2996efa67b4f6ad20df97674d01076021b3c1b5a206f5fe01d4f7631b9f0bbbb\": rpc error: code = NotFound desc = could not find container \"2996efa67b4f6ad20df97674d01076021b3c1b5a206f5fe01d4f7631b9f0bbbb\": container with ID starting with 2996efa67b4f6ad20df97674d01076021b3c1b5a206f5fe01d4f7631b9f0bbbb not found: ID does not exist" Nov 21 13:53:13 crc kubenswrapper[4721]: I1121 13:53:13.256713 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmth2\" (UniqueName: \"kubernetes.io/projected/228f7da2-c323-4417-a54b-7a6af230dd50-kube-api-access-zmth2\") pod \"228f7da2-c323-4417-a54b-7a6af230dd50\" (UID: \"228f7da2-c323-4417-a54b-7a6af230dd50\") " Nov 21 13:53:13 crc kubenswrapper[4721]: I1121 13:53:13.262393 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/228f7da2-c323-4417-a54b-7a6af230dd50-kube-api-access-zmth2" (OuterVolumeSpecName: "kube-api-access-zmth2") pod 
"228f7da2-c323-4417-a54b-7a6af230dd50" (UID: "228f7da2-c323-4417-a54b-7a6af230dd50"). InnerVolumeSpecName "kube-api-access-zmth2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:53:13 crc kubenswrapper[4721]: I1121 13:53:13.357981 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmth2\" (UniqueName: \"kubernetes.io/projected/228f7da2-c323-4417-a54b-7a6af230dd50-kube-api-access-zmth2\") on node \"crc\" DevicePath \"\"" Nov 21 13:53:13 crc kubenswrapper[4721]: I1121 13:53:13.536118 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-dngnb"] Nov 21 13:53:13 crc kubenswrapper[4721]: I1121 13:53:13.554687 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-index-dngnb"] Nov 21 13:53:14 crc kubenswrapper[4721]: I1121 13:53:14.997115 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="228f7da2-c323-4417-a54b-7a6af230dd50" path="/var/lib/kubelet/pods/228f7da2-c323-4417-a54b-7a6af230dd50/volumes" Nov 21 13:53:15 crc kubenswrapper[4721]: I1121 13:53:15.221535 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-4zsjc" event={"ID":"c90b21c7-7bc7-4c0d-9893-63f8d9d784fc","Type":"ContainerStarted","Data":"a8348b8d5ed2e8f32d8e24a6bea0911191219fd94cc9774ac080b12b9029159b"} Nov 21 13:53:15 crc kubenswrapper[4721]: I1121 13:53:15.249264 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-4zsjc" podStartSLOduration=2.873990735 podStartE2EDuration="4.249239287s" podCreationTimestamp="2025-11-21 13:53:11 +0000 UTC" firstStartedPulling="2025-11-21 13:53:12.664627772 +0000 UTC m=+1030.642519030" lastFinishedPulling="2025-11-21 13:53:14.039876314 +0000 UTC m=+1032.017767582" observedRunningTime="2025-11-21 13:53:15.240425374 +0000 UTC m=+1033.218316642" watchObservedRunningTime="2025-11-21 13:53:15.249239287 +0000 UTC m=+1033.227130555" Nov 21 13:53:22 crc kubenswrapper[4721]: I1121 13:53:22.234827 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-index-4zsjc" Nov 21 13:53:22 crc kubenswrapper[4721]: I1121 13:53:22.235429 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/infra-operator-index-4zsjc" Nov 21 13:53:22 crc kubenswrapper[4721]: I1121 13:53:22.267764 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/infra-operator-index-4zsjc" Nov 21 13:53:22 crc kubenswrapper[4721]: I1121 13:53:22.342637 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-index-4zsjc" Nov 21 13:53:23 crc kubenswrapper[4721]: I1121 13:53:23.739857 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc"] Nov 21 13:53:23 crc kubenswrapper[4721]: E1121 13:53:23.740104 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="228f7da2-c323-4417-a54b-7a6af230dd50" containerName="registry-server" Nov 21 13:53:23 crc kubenswrapper[4721]: I1121 13:53:23.740121 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="228f7da2-c323-4417-a54b-7a6af230dd50" containerName="registry-server" Nov 21 13:53:23 crc kubenswrapper[4721]: I1121 13:53:23.740241 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="228f7da2-c323-4417-a54b-7a6af230dd50" 
containerName="registry-server" Nov 21 13:53:23 crc kubenswrapper[4721]: I1121 13:53:23.741023 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc" Nov 21 13:53:23 crc kubenswrapper[4721]: I1121 13:53:23.743209 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-tw86c" Nov 21 13:53:23 crc kubenswrapper[4721]: I1121 13:53:23.749789 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc"] Nov 21 13:53:23 crc kubenswrapper[4721]: I1121 13:53:23.797950 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7sh5\" (UniqueName: \"kubernetes.io/projected/4334b518-e582-4308-a3e7-cbb14d281b61-kube-api-access-c7sh5\") pod \"ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc\" (UID: \"4334b518-e582-4308-a3e7-cbb14d281b61\") " pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc" Nov 21 13:53:23 crc kubenswrapper[4721]: I1121 13:53:23.798025 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4334b518-e582-4308-a3e7-cbb14d281b61-util\") pod \"ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc\" (UID: \"4334b518-e582-4308-a3e7-cbb14d281b61\") " pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc" Nov 21 13:53:23 crc kubenswrapper[4721]: I1121 13:53:23.798110 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4334b518-e582-4308-a3e7-cbb14d281b61-bundle\") pod \"ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc\" (UID: \"4334b518-e582-4308-a3e7-cbb14d281b61\") " pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc" Nov 21 13:53:23 crc kubenswrapper[4721]: I1121 13:53:23.900130 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4334b518-e582-4308-a3e7-cbb14d281b61-bundle\") pod \"ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc\" (UID: \"4334b518-e582-4308-a3e7-cbb14d281b61\") " pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc" Nov 21 13:53:23 crc kubenswrapper[4721]: I1121 13:53:23.900613 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4334b518-e582-4308-a3e7-cbb14d281b61-bundle\") pod \"ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc\" (UID: \"4334b518-e582-4308-a3e7-cbb14d281b61\") " pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc" Nov 21 13:53:23 crc kubenswrapper[4721]: I1121 13:53:23.900725 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7sh5\" (UniqueName: \"kubernetes.io/projected/4334b518-e582-4308-a3e7-cbb14d281b61-kube-api-access-c7sh5\") pod \"ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc\" (UID: \"4334b518-e582-4308-a3e7-cbb14d281b61\") " pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc" Nov 21 13:53:23 crc kubenswrapper[4721]: I1121 13:53:23.900840 4721 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4334b518-e582-4308-a3e7-cbb14d281b61-util\") pod \"ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc\" (UID: \"4334b518-e582-4308-a3e7-cbb14d281b61\") " pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc" Nov 21 13:53:23 crc kubenswrapper[4721]: I1121 13:53:23.901215 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4334b518-e582-4308-a3e7-cbb14d281b61-util\") pod \"ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc\" (UID: \"4334b518-e582-4308-a3e7-cbb14d281b61\") " pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc" Nov 21 13:53:23 crc kubenswrapper[4721]: I1121 13:53:23.940350 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7sh5\" (UniqueName: \"kubernetes.io/projected/4334b518-e582-4308-a3e7-cbb14d281b61-kube-api-access-c7sh5\") pod \"ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc\" (UID: \"4334b518-e582-4308-a3e7-cbb14d281b61\") " pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc" Nov 21 13:53:24 crc kubenswrapper[4721]: I1121 13:53:24.062679 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc" Nov 21 13:53:24 crc kubenswrapper[4721]: I1121 13:53:24.306957 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc"] Nov 21 13:53:25 crc kubenswrapper[4721]: I1121 13:53:25.299346 4721 generic.go:334] "Generic (PLEG): container finished" podID="4334b518-e582-4308-a3e7-cbb14d281b61" containerID="b0e05cbce8e32ca2ece4b84e295d03cb98522a2294b4285be1990fd4cec6cf09" exitCode=0 Nov 21 13:53:25 crc kubenswrapper[4721]: I1121 13:53:25.299470 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc" event={"ID":"4334b518-e582-4308-a3e7-cbb14d281b61","Type":"ContainerDied","Data":"b0e05cbce8e32ca2ece4b84e295d03cb98522a2294b4285be1990fd4cec6cf09"} Nov 21 13:53:25 crc kubenswrapper[4721]: I1121 13:53:25.299716 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc" event={"ID":"4334b518-e582-4308-a3e7-cbb14d281b61","Type":"ContainerStarted","Data":"0066beeb3a88cb06b279b751315a2ecb302aea3ae1e965e91d042b2e0e922022"} Nov 21 13:53:27 crc kubenswrapper[4721]: I1121 13:53:27.314953 4721 generic.go:334] "Generic (PLEG): container finished" podID="4334b518-e582-4308-a3e7-cbb14d281b61" containerID="6da2e491775ac57af48e0fb06dcfcf6ac8483a8531fe9169d3a6f0b09497a916" exitCode=0 Nov 21 13:53:27 crc kubenswrapper[4721]: I1121 13:53:27.314990 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc" event={"ID":"4334b518-e582-4308-a3e7-cbb14d281b61","Type":"ContainerDied","Data":"6da2e491775ac57af48e0fb06dcfcf6ac8483a8531fe9169d3a6f0b09497a916"} Nov 21 13:53:28 crc kubenswrapper[4721]: I1121 13:53:28.323295 4721 generic.go:334] "Generic (PLEG): container finished" podID="4334b518-e582-4308-a3e7-cbb14d281b61" 
containerID="9ec1566187be90e2d4900ceb97c602a35a0edecca077d26cea0518442587b3d1" exitCode=0 Nov 21 13:53:28 crc kubenswrapper[4721]: I1121 13:53:28.323337 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc" event={"ID":"4334b518-e582-4308-a3e7-cbb14d281b61","Type":"ContainerDied","Data":"9ec1566187be90e2d4900ceb97c602a35a0edecca077d26cea0518442587b3d1"} Nov 21 13:53:29 crc kubenswrapper[4721]: I1121 13:53:29.560222 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc" Nov 21 13:53:29 crc kubenswrapper[4721]: I1121 13:53:29.686155 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4334b518-e582-4308-a3e7-cbb14d281b61-bundle\") pod \"4334b518-e582-4308-a3e7-cbb14d281b61\" (UID: \"4334b518-e582-4308-a3e7-cbb14d281b61\") " Nov 21 13:53:29 crc kubenswrapper[4721]: I1121 13:53:29.686201 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7sh5\" (UniqueName: \"kubernetes.io/projected/4334b518-e582-4308-a3e7-cbb14d281b61-kube-api-access-c7sh5\") pod \"4334b518-e582-4308-a3e7-cbb14d281b61\" (UID: \"4334b518-e582-4308-a3e7-cbb14d281b61\") " Nov 21 13:53:29 crc kubenswrapper[4721]: I1121 13:53:29.686241 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4334b518-e582-4308-a3e7-cbb14d281b61-util\") pod \"4334b518-e582-4308-a3e7-cbb14d281b61\" (UID: \"4334b518-e582-4308-a3e7-cbb14d281b61\") " Nov 21 13:53:29 crc kubenswrapper[4721]: I1121 13:53:29.687421 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4334b518-e582-4308-a3e7-cbb14d281b61-bundle" (OuterVolumeSpecName: "bundle") pod "4334b518-e582-4308-a3e7-cbb14d281b61" (UID: "4334b518-e582-4308-a3e7-cbb14d281b61"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:53:29 crc kubenswrapper[4721]: I1121 13:53:29.693616 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4334b518-e582-4308-a3e7-cbb14d281b61-kube-api-access-c7sh5" (OuterVolumeSpecName: "kube-api-access-c7sh5") pod "4334b518-e582-4308-a3e7-cbb14d281b61" (UID: "4334b518-e582-4308-a3e7-cbb14d281b61"). InnerVolumeSpecName "kube-api-access-c7sh5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:53:29 crc kubenswrapper[4721]: I1121 13:53:29.702600 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4334b518-e582-4308-a3e7-cbb14d281b61-util" (OuterVolumeSpecName: "util") pod "4334b518-e582-4308-a3e7-cbb14d281b61" (UID: "4334b518-e582-4308-a3e7-cbb14d281b61"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:53:29 crc kubenswrapper[4721]: I1121 13:53:29.788219 4721 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4334b518-e582-4308-a3e7-cbb14d281b61-util\") on node \"crc\" DevicePath \"\"" Nov 21 13:53:29 crc kubenswrapper[4721]: I1121 13:53:29.788258 4721 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4334b518-e582-4308-a3e7-cbb14d281b61-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 13:53:29 crc kubenswrapper[4721]: I1121 13:53:29.788268 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7sh5\" (UniqueName: \"kubernetes.io/projected/4334b518-e582-4308-a3e7-cbb14d281b61-kube-api-access-c7sh5\") on node \"crc\" DevicePath \"\"" Nov 21 13:53:30 crc kubenswrapper[4721]: I1121 13:53:30.341139 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc" event={"ID":"4334b518-e582-4308-a3e7-cbb14d281b61","Type":"ContainerDied","Data":"0066beeb3a88cb06b279b751315a2ecb302aea3ae1e965e91d042b2e0e922022"} Nov 21 13:53:30 crc kubenswrapper[4721]: I1121 13:53:30.341194 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0066beeb3a88cb06b279b751315a2ecb302aea3ae1e965e91d042b2e0e922022" Nov 21 13:53:30 crc kubenswrapper[4721]: I1121 13:53:30.341222 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc" Nov 21 13:53:37 crc kubenswrapper[4721]: I1121 13:53:37.258092 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r"] Nov 21 13:53:37 crc kubenswrapper[4721]: E1121 13:53:37.259476 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4334b518-e582-4308-a3e7-cbb14d281b61" containerName="util" Nov 21 13:53:37 crc kubenswrapper[4721]: I1121 13:53:37.259499 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="4334b518-e582-4308-a3e7-cbb14d281b61" containerName="util" Nov 21 13:53:37 crc kubenswrapper[4721]: E1121 13:53:37.259531 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4334b518-e582-4308-a3e7-cbb14d281b61" containerName="pull" Nov 21 13:53:37 crc kubenswrapper[4721]: I1121 13:53:37.259542 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="4334b518-e582-4308-a3e7-cbb14d281b61" containerName="pull" Nov 21 13:53:37 crc kubenswrapper[4721]: E1121 13:53:37.259569 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4334b518-e582-4308-a3e7-cbb14d281b61" containerName="extract" Nov 21 13:53:37 crc kubenswrapper[4721]: I1121 13:53:37.259581 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="4334b518-e582-4308-a3e7-cbb14d281b61" containerName="extract" Nov 21 13:53:37 crc kubenswrapper[4721]: I1121 13:53:37.259822 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="4334b518-e582-4308-a3e7-cbb14d281b61" containerName="extract" Nov 21 13:53:37 crc kubenswrapper[4721]: I1121 13:53:37.260870 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" Nov 21 13:53:37 crc kubenswrapper[4721]: I1121 13:53:37.266464 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-service-cert" Nov 21 13:53:37 crc kubenswrapper[4721]: I1121 13:53:37.270870 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-n2t85" Nov 21 13:53:37 crc kubenswrapper[4721]: I1121 13:53:37.302008 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r"] Nov 21 13:53:37 crc kubenswrapper[4721]: I1121 13:53:37.385984 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f96kq\" (UniqueName: \"kubernetes.io/projected/18246f53-8cd9-4eae-85d8-d7543af4d397-kube-api-access-f96kq\") pod \"infra-operator-controller-manager-87d79d4dc-76n7r\" (UID: \"18246f53-8cd9-4eae-85d8-d7543af4d397\") " pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" Nov 21 13:53:37 crc kubenswrapper[4721]: I1121 13:53:37.386047 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/18246f53-8cd9-4eae-85d8-d7543af4d397-webhook-cert\") pod \"infra-operator-controller-manager-87d79d4dc-76n7r\" (UID: \"18246f53-8cd9-4eae-85d8-d7543af4d397\") " pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" Nov 21 13:53:37 crc kubenswrapper[4721]: I1121 13:53:37.386606 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/18246f53-8cd9-4eae-85d8-d7543af4d397-apiservice-cert\") pod \"infra-operator-controller-manager-87d79d4dc-76n7r\" (UID: \"18246f53-8cd9-4eae-85d8-d7543af4d397\") " pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" Nov 21 13:53:37 crc kubenswrapper[4721]: I1121 13:53:37.488257 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/18246f53-8cd9-4eae-85d8-d7543af4d397-apiservice-cert\") pod \"infra-operator-controller-manager-87d79d4dc-76n7r\" (UID: \"18246f53-8cd9-4eae-85d8-d7543af4d397\") " pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" Nov 21 13:53:37 crc kubenswrapper[4721]: I1121 13:53:37.488321 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f96kq\" (UniqueName: \"kubernetes.io/projected/18246f53-8cd9-4eae-85d8-d7543af4d397-kube-api-access-f96kq\") pod \"infra-operator-controller-manager-87d79d4dc-76n7r\" (UID: \"18246f53-8cd9-4eae-85d8-d7543af4d397\") " pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" Nov 21 13:53:37 crc kubenswrapper[4721]: I1121 13:53:37.488370 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/18246f53-8cd9-4eae-85d8-d7543af4d397-webhook-cert\") pod \"infra-operator-controller-manager-87d79d4dc-76n7r\" (UID: \"18246f53-8cd9-4eae-85d8-d7543af4d397\") " pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" Nov 21 13:53:37 crc kubenswrapper[4721]: I1121 13:53:37.497191 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/18246f53-8cd9-4eae-85d8-d7543af4d397-apiservice-cert\") pod \"infra-operator-controller-manager-87d79d4dc-76n7r\" (UID: \"18246f53-8cd9-4eae-85d8-d7543af4d397\") " pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" Nov 21 13:53:37 crc kubenswrapper[4721]: I1121 13:53:37.501161 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/18246f53-8cd9-4eae-85d8-d7543af4d397-webhook-cert\") pod \"infra-operator-controller-manager-87d79d4dc-76n7r\" (UID: \"18246f53-8cd9-4eae-85d8-d7543af4d397\") " pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" Nov 21 13:53:37 crc kubenswrapper[4721]: I1121 13:53:37.512974 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f96kq\" (UniqueName: \"kubernetes.io/projected/18246f53-8cd9-4eae-85d8-d7543af4d397-kube-api-access-f96kq\") pod \"infra-operator-controller-manager-87d79d4dc-76n7r\" (UID: \"18246f53-8cd9-4eae-85d8-d7543af4d397\") " pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" Nov 21 13:53:37 crc kubenswrapper[4721]: I1121 13:53:37.581760 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" Nov 21 13:53:37 crc kubenswrapper[4721]: I1121 13:53:37.846242 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r"] Nov 21 13:53:37 crc kubenswrapper[4721]: W1121 13:53:37.851617 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod18246f53_8cd9_4eae_85d8_d7543af4d397.slice/crio-f5c9c7d16e0afd92f6d40cd99f54d80e39c0598f419d3800fe07b08e57338f40 WatchSource:0}: Error finding container f5c9c7d16e0afd92f6d40cd99f54d80e39c0598f419d3800fe07b08e57338f40: Status 404 returned error can't find the container with id f5c9c7d16e0afd92f6d40cd99f54d80e39c0598f419d3800fe07b08e57338f40 Nov 21 13:53:38 crc kubenswrapper[4721]: I1121 13:53:38.398160 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" event={"ID":"18246f53-8cd9-4eae-85d8-d7543af4d397","Type":"ContainerStarted","Data":"f5c9c7d16e0afd92f6d40cd99f54d80e39c0598f419d3800fe07b08e57338f40"} Nov 21 13:53:41 crc kubenswrapper[4721]: I1121 13:53:41.419393 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" event={"ID":"18246f53-8cd9-4eae-85d8-d7543af4d397","Type":"ContainerStarted","Data":"93c51a5152bf279a0cd14ec92933346f7887ceb22a0a7ccc26417617625cff6b"} Nov 21 13:53:41 crc kubenswrapper[4721]: I1121 13:53:41.420332 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" event={"ID":"18246f53-8cd9-4eae-85d8-d7543af4d397","Type":"ContainerStarted","Data":"519003276f690ecf67828e34fc38d2ff72bf3a6ed5db3091b1eafe47f6fbdf18"} Nov 21 13:53:41 crc kubenswrapper[4721]: I1121 13:53:41.420357 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" Nov 21 13:53:41 crc kubenswrapper[4721]: I1121 13:53:41.440382 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" podStartSLOduration=1.6969082210000002 podStartE2EDuration="4.440366554s" podCreationTimestamp="2025-11-21 13:53:37 +0000 UTC" firstStartedPulling="2025-11-21 13:53:37.853628775 +0000 UTC m=+1055.831520033" lastFinishedPulling="2025-11-21 13:53:40.597087108 +0000 UTC m=+1058.574978366" observedRunningTime="2025-11-21 13:53:41.439848748 +0000 UTC m=+1059.417740046" watchObservedRunningTime="2025-11-21 13:53:41.440366554 +0000 UTC m=+1059.418257812" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.267082 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/openstack-galera-0"] Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.268412 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.270677 4721 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"galera-openstack-dockercfg-hxzwj" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.270768 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"openstack-config-data" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.271257 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"openshift-service-ca.crt" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.271263 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"kube-root-ca.crt" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.271451 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"openstack-scripts" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.279933 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/openstack-galera-2"] Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.281040 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.287438 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/openstack-galera-1"] Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.288990 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.310018 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/openstack-galera-0"] Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.318200 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/openstack-galera-1"] Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.335117 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/openstack-galera-2"] Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.377960 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1-operator-scripts\") pod \"openstack-galera-1\" (UID: \"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1\") " pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.378010 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29905f76-0ed3-4b86-b2a8-5b9da17c225a-operator-scripts\") pod \"openstack-galera-2\" (UID: \"29905f76-0ed3-4b86-b2a8-5b9da17c225a\") " pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.378043 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s27hr\" (UniqueName: \"kubernetes.io/projected/29905f76-0ed3-4b86-b2a8-5b9da17c225a-kube-api-access-s27hr\") pod \"openstack-galera-2\" (UID: \"29905f76-0ed3-4b86-b2a8-5b9da17c225a\") " pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.378099 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/29905f76-0ed3-4b86-b2a8-5b9da17c225a-kolla-config\") pod \"openstack-galera-2\" (UID: \"29905f76-0ed3-4b86-b2a8-5b9da17c225a\") " pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.378178 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ghds\" (UniqueName: \"kubernetes.io/projected/d3b77c54-c410-4bfc-867b-3ba283ab4d98-kube-api-access-5ghds\") pod \"openstack-galera-0\" (UID: \"d3b77c54-c410-4bfc-867b-3ba283ab4d98\") " pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.378220 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-1\" (UID: \"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1\") " pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.378246 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/29905f76-0ed3-4b86-b2a8-5b9da17c225a-config-data-generated\") pod \"openstack-galera-2\" (UID: \"29905f76-0ed3-4b86-b2a8-5b9da17c225a\") " pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.378264 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d3b77c54-c410-4bfc-867b-3ba283ab4d98-config-data-generated\") pod \"openstack-galera-0\" (UID: \"d3b77c54-c410-4bfc-867b-3ba283ab4d98\") " pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.378293 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-2\" (UID: \"29905f76-0ed3-4b86-b2a8-5b9da17c225a\") " pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.378311 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/29905f76-0ed3-4b86-b2a8-5b9da17c225a-config-data-default\") pod \"openstack-galera-2\" (UID: \"29905f76-0ed3-4b86-b2a8-5b9da17c225a\") " pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.378329 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1-config-data-generated\") pod \"openstack-galera-1\" (UID: \"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1\") " pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.378351 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d3b77c54-c410-4bfc-867b-3ba283ab4d98-config-data-default\") pod \"openstack-galera-0\" (UID: \"d3b77c54-c410-4bfc-867b-3ba283ab4d98\") " pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.378368 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1-kolla-config\") pod \"openstack-galera-1\" (UID: \"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1\") " pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.378389 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4shgj\" (UniqueName: \"kubernetes.io/projected/75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1-kube-api-access-4shgj\") pod \"openstack-galera-1\" (UID: \"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1\") " pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.378408 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d3b77c54-c410-4bfc-867b-3ba283ab4d98-kolla-config\") pod \"openstack-galera-0\" (UID: \"d3b77c54-c410-4bfc-867b-3ba283ab4d98\") " pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.378429 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"d3b77c54-c410-4bfc-867b-3ba283ab4d98\") " pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.378452 4721 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3b77c54-c410-4bfc-867b-3ba283ab4d98-operator-scripts\") pod \"openstack-galera-0\" (UID: \"d3b77c54-c410-4bfc-867b-3ba283ab4d98\") " pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.378471 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1-config-data-default\") pod \"openstack-galera-1\" (UID: \"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1\") " pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.479610 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ghds\" (UniqueName: \"kubernetes.io/projected/d3b77c54-c410-4bfc-867b-3ba283ab4d98-kube-api-access-5ghds\") pod \"openstack-galera-0\" (UID: \"d3b77c54-c410-4bfc-867b-3ba283ab4d98\") " pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.479656 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-1\" (UID: \"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1\") " pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.479687 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/29905f76-0ed3-4b86-b2a8-5b9da17c225a-config-data-generated\") pod \"openstack-galera-2\" (UID: \"29905f76-0ed3-4b86-b2a8-5b9da17c225a\") " pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.479705 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d3b77c54-c410-4bfc-867b-3ba283ab4d98-config-data-generated\") pod \"openstack-galera-0\" (UID: \"d3b77c54-c410-4bfc-867b-3ba283ab4d98\") " pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.479736 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/29905f76-0ed3-4b86-b2a8-5b9da17c225a-config-data-default\") pod \"openstack-galera-2\" (UID: \"29905f76-0ed3-4b86-b2a8-5b9da17c225a\") " pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.480109 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-1\" (UID: \"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1\") device mount path \"/mnt/openstack/pv01\"" pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.480127 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-2\" (UID: \"29905f76-0ed3-4b86-b2a8-5b9da17c225a\") device mount path \"/mnt/openstack/pv10\"" pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.480202 4721 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/29905f76-0ed3-4b86-b2a8-5b9da17c225a-config-data-generated\") pod \"openstack-galera-2\" (UID: \"29905f76-0ed3-4b86-b2a8-5b9da17c225a\") " pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.480238 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d3b77c54-c410-4bfc-867b-3ba283ab4d98-config-data-generated\") pod \"openstack-galera-0\" (UID: \"d3b77c54-c410-4bfc-867b-3ba283ab4d98\") " pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.480279 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-2\" (UID: \"29905f76-0ed3-4b86-b2a8-5b9da17c225a\") " pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.480310 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1-config-data-generated\") pod \"openstack-galera-1\" (UID: \"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1\") " pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.480333 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d3b77c54-c410-4bfc-867b-3ba283ab4d98-config-data-default\") pod \"openstack-galera-0\" (UID: \"d3b77c54-c410-4bfc-867b-3ba283ab4d98\") " pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.480641 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1-config-data-generated\") pod \"openstack-galera-1\" (UID: \"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1\") " pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.480681 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1-kolla-config\") pod \"openstack-galera-1\" (UID: \"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1\") " pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.480720 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4shgj\" (UniqueName: \"kubernetes.io/projected/75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1-kube-api-access-4shgj\") pod \"openstack-galera-1\" (UID: \"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1\") " pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.480744 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d3b77c54-c410-4bfc-867b-3ba283ab4d98-kolla-config\") pod \"openstack-galera-0\" (UID: \"d3b77c54-c410-4bfc-867b-3ba283ab4d98\") " pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.481105 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod 
\"openstack-galera-0\" (UID: \"d3b77c54-c410-4bfc-867b-3ba283ab4d98\") " pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.481137 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3b77c54-c410-4bfc-867b-3ba283ab4d98-operator-scripts\") pod \"openstack-galera-0\" (UID: \"d3b77c54-c410-4bfc-867b-3ba283ab4d98\") " pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.481225 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1-kolla-config\") pod \"openstack-galera-1\" (UID: \"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1\") " pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.481251 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1-config-data-default\") pod \"openstack-galera-1\" (UID: \"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1\") " pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.481221 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"d3b77c54-c410-4bfc-867b-3ba283ab4d98\") device mount path \"/mnt/openstack/pv11\"" pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.481266 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d3b77c54-c410-4bfc-867b-3ba283ab4d98-config-data-default\") pod \"openstack-galera-0\" (UID: \"d3b77c54-c410-4bfc-867b-3ba283ab4d98\") " pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.481270 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1-operator-scripts\") pod \"openstack-galera-1\" (UID: \"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1\") " pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.481340 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29905f76-0ed3-4b86-b2a8-5b9da17c225a-operator-scripts\") pod \"openstack-galera-2\" (UID: \"29905f76-0ed3-4b86-b2a8-5b9da17c225a\") " pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.481403 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s27hr\" (UniqueName: \"kubernetes.io/projected/29905f76-0ed3-4b86-b2a8-5b9da17c225a-kube-api-access-s27hr\") pod \"openstack-galera-2\" (UID: \"29905f76-0ed3-4b86-b2a8-5b9da17c225a\") " pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.481455 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/29905f76-0ed3-4b86-b2a8-5b9da17c225a-kolla-config\") pod \"openstack-galera-2\" (UID: \"29905f76-0ed3-4b86-b2a8-5b9da17c225a\") " 
pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.481731 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d3b77c54-c410-4bfc-867b-3ba283ab4d98-kolla-config\") pod \"openstack-galera-0\" (UID: \"d3b77c54-c410-4bfc-867b-3ba283ab4d98\") " pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.481923 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/29905f76-0ed3-4b86-b2a8-5b9da17c225a-config-data-default\") pod \"openstack-galera-2\" (UID: \"29905f76-0ed3-4b86-b2a8-5b9da17c225a\") " pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.482124 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1-config-data-default\") pod \"openstack-galera-1\" (UID: \"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1\") " pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.482235 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/29905f76-0ed3-4b86-b2a8-5b9da17c225a-kolla-config\") pod \"openstack-galera-2\" (UID: \"29905f76-0ed3-4b86-b2a8-5b9da17c225a\") " pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.482807 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29905f76-0ed3-4b86-b2a8-5b9da17c225a-operator-scripts\") pod \"openstack-galera-2\" (UID: \"29905f76-0ed3-4b86-b2a8-5b9da17c225a\") " pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.483204 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1-operator-scripts\") pod \"openstack-galera-1\" (UID: \"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1\") " pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.483919 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3b77c54-c410-4bfc-867b-3ba283ab4d98-operator-scripts\") pod \"openstack-galera-0\" (UID: \"d3b77c54-c410-4bfc-867b-3ba283ab4d98\") " pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.505758 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4shgj\" (UniqueName: \"kubernetes.io/projected/75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1-kube-api-access-4shgj\") pod \"openstack-galera-1\" (UID: \"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1\") " pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.506115 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-2\" (UID: \"29905f76-0ed3-4b86-b2a8-5b9da17c225a\") " pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.506869 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-s27hr\" (UniqueName: \"kubernetes.io/projected/29905f76-0ed3-4b86-b2a8-5b9da17c225a-kube-api-access-s27hr\") pod \"openstack-galera-2\" (UID: \"29905f76-0ed3-4b86-b2a8-5b9da17c225a\") " pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.509808 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-1\" (UID: \"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1\") " pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.511435 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ghds\" (UniqueName: \"kubernetes.io/projected/d3b77c54-c410-4bfc-867b-3ba283ab4d98-kube-api-access-5ghds\") pod \"openstack-galera-0\" (UID: \"d3b77c54-c410-4bfc-867b-3ba283ab4d98\") " pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.516912 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"d3b77c54-c410-4bfc-867b-3ba283ab4d98\") " pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.609315 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.614773 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.626094 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:53:44 crc kubenswrapper[4721]: I1121 13:53:44.918864 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/openstack-galera-1"] Nov 21 13:53:45 crc kubenswrapper[4721]: I1121 13:53:45.046800 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/openstack-galera-2"] Nov 21 13:53:45 crc kubenswrapper[4721]: I1121 13:53:45.051161 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/openstack-galera-0"] Nov 21 13:53:45 crc kubenswrapper[4721]: W1121 13:53:45.051658 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod29905f76_0ed3_4b86_b2a8_5b9da17c225a.slice/crio-a53d389b7f562e5fe8584eb26fa6e8a8ee047ecd9a8b217c337b6431a72ea1b9 WatchSource:0}: Error finding container a53d389b7f562e5fe8584eb26fa6e8a8ee047ecd9a8b217c337b6431a72ea1b9: Status 404 returned error can't find the container with id a53d389b7f562e5fe8584eb26fa6e8a8ee047ecd9a8b217c337b6431a72ea1b9 Nov 21 13:53:45 crc kubenswrapper[4721]: I1121 13:53:45.442967 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-1" event={"ID":"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1","Type":"ContainerStarted","Data":"e0b27d0dd281acca834331bf2fff5356aff0e97ad7f57462d25922e7ebfea57e"} Nov 21 13:53:45 crc kubenswrapper[4721]: I1121 13:53:45.444275 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-2" event={"ID":"29905f76-0ed3-4b86-b2a8-5b9da17c225a","Type":"ContainerStarted","Data":"a53d389b7f562e5fe8584eb26fa6e8a8ee047ecd9a8b217c337b6431a72ea1b9"} Nov 21 13:53:45 crc kubenswrapper[4721]: I1121 13:53:45.445153 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-0" event={"ID":"d3b77c54-c410-4bfc-867b-3ba283ab4d98","Type":"ContainerStarted","Data":"80c471b53a3663fe095ad99d265a74e1151dc25e11bf8a6242a146d478839ba5"} Nov 21 13:53:47 crc kubenswrapper[4721]: I1121 13:53:47.586607 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" Nov 21 13:53:51 crc kubenswrapper[4721]: I1121 13:53:51.110200 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-fsccc"] Nov 21 13:53:51 crc kubenswrapper[4721]: I1121 13:53:51.111795 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-fsccc" Nov 21 13:53:51 crc kubenswrapper[4721]: I1121 13:53:51.116350 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-index-dockercfg-qjt84" Nov 21 13:53:51 crc kubenswrapper[4721]: I1121 13:53:51.122234 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-fsccc"] Nov 21 13:53:51 crc kubenswrapper[4721]: I1121 13:53:51.188527 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2x7s\" (UniqueName: \"kubernetes.io/projected/b7e7c40c-9247-4c76-bbf9-d6adc9849eab-kube-api-access-j2x7s\") pod \"rabbitmq-cluster-operator-index-fsccc\" (UID: \"b7e7c40c-9247-4c76-bbf9-d6adc9849eab\") " pod="openstack-operators/rabbitmq-cluster-operator-index-fsccc" Nov 21 13:53:51 crc kubenswrapper[4721]: I1121 13:53:51.290873 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2x7s\" (UniqueName: \"kubernetes.io/projected/b7e7c40c-9247-4c76-bbf9-d6adc9849eab-kube-api-access-j2x7s\") pod \"rabbitmq-cluster-operator-index-fsccc\" (UID: \"b7e7c40c-9247-4c76-bbf9-d6adc9849eab\") " pod="openstack-operators/rabbitmq-cluster-operator-index-fsccc" Nov 21 13:53:51 crc kubenswrapper[4721]: I1121 13:53:51.314795 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2x7s\" (UniqueName: \"kubernetes.io/projected/b7e7c40c-9247-4c76-bbf9-d6adc9849eab-kube-api-access-j2x7s\") pod \"rabbitmq-cluster-operator-index-fsccc\" (UID: \"b7e7c40c-9247-4c76-bbf9-d6adc9849eab\") " pod="openstack-operators/rabbitmq-cluster-operator-index-fsccc" Nov 21 13:53:51 crc kubenswrapper[4721]: I1121 13:53:51.446256 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-fsccc" Nov 21 13:53:58 crc kubenswrapper[4721]: I1121 13:53:58.566775 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/memcached-0"] Nov 21 13:53:58 crc kubenswrapper[4721]: I1121 13:53:58.584385 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/memcached-0" Nov 21 13:53:58 crc kubenswrapper[4721]: I1121 13:53:58.595290 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"memcached-config-data" Nov 21 13:53:58 crc kubenswrapper[4721]: I1121 13:53:58.597040 4721 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"memcached-memcached-dockercfg-h6t8h" Nov 21 13:53:58 crc kubenswrapper[4721]: I1121 13:53:58.624552 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/memcached-0"] Nov 21 13:53:58 crc kubenswrapper[4721]: I1121 13:53:58.722509 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mqmk\" (UniqueName: \"kubernetes.io/projected/ffed5669-7436-4a05-a10b-8decf1eee90c-kube-api-access-6mqmk\") pod \"memcached-0\" (UID: \"ffed5669-7436-4a05-a10b-8decf1eee90c\") " pod="horizon-kuttl-tests/memcached-0" Nov 21 13:53:58 crc kubenswrapper[4721]: I1121 13:53:58.722640 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ffed5669-7436-4a05-a10b-8decf1eee90c-config-data\") pod \"memcached-0\" (UID: \"ffed5669-7436-4a05-a10b-8decf1eee90c\") " pod="horizon-kuttl-tests/memcached-0" Nov 21 13:53:58 crc kubenswrapper[4721]: I1121 13:53:58.722671 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ffed5669-7436-4a05-a10b-8decf1eee90c-kolla-config\") pod \"memcached-0\" (UID: \"ffed5669-7436-4a05-a10b-8decf1eee90c\") " pod="horizon-kuttl-tests/memcached-0" Nov 21 13:53:58 crc kubenswrapper[4721]: I1121 13:53:58.824397 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mqmk\" (UniqueName: \"kubernetes.io/projected/ffed5669-7436-4a05-a10b-8decf1eee90c-kube-api-access-6mqmk\") pod \"memcached-0\" (UID: \"ffed5669-7436-4a05-a10b-8decf1eee90c\") " pod="horizon-kuttl-tests/memcached-0" Nov 21 13:53:58 crc kubenswrapper[4721]: I1121 13:53:58.824471 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ffed5669-7436-4a05-a10b-8decf1eee90c-config-data\") pod \"memcached-0\" (UID: \"ffed5669-7436-4a05-a10b-8decf1eee90c\") " pod="horizon-kuttl-tests/memcached-0" Nov 21 13:53:58 crc kubenswrapper[4721]: I1121 13:53:58.824511 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ffed5669-7436-4a05-a10b-8decf1eee90c-kolla-config\") pod \"memcached-0\" (UID: \"ffed5669-7436-4a05-a10b-8decf1eee90c\") " pod="horizon-kuttl-tests/memcached-0" Nov 21 13:53:58 crc kubenswrapper[4721]: I1121 13:53:58.825908 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ffed5669-7436-4a05-a10b-8decf1eee90c-kolla-config\") pod \"memcached-0\" (UID: \"ffed5669-7436-4a05-a10b-8decf1eee90c\") " pod="horizon-kuttl-tests/memcached-0" Nov 21 13:53:58 crc kubenswrapper[4721]: I1121 13:53:58.826117 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ffed5669-7436-4a05-a10b-8decf1eee90c-config-data\") pod \"memcached-0\" (UID: \"ffed5669-7436-4a05-a10b-8decf1eee90c\") " pod="horizon-kuttl-tests/memcached-0" Nov 21 
13:53:58 crc kubenswrapper[4721]: I1121 13:53:58.848171 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mqmk\" (UniqueName: \"kubernetes.io/projected/ffed5669-7436-4a05-a10b-8decf1eee90c-kube-api-access-6mqmk\") pod \"memcached-0\" (UID: \"ffed5669-7436-4a05-a10b-8decf1eee90c\") " pod="horizon-kuttl-tests/memcached-0" Nov 21 13:53:58 crc kubenswrapper[4721]: I1121 13:53:58.930880 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/memcached-0" Nov 21 13:53:58 crc kubenswrapper[4721]: I1121 13:53:58.974916 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-fsccc"] Nov 21 13:53:58 crc kubenswrapper[4721]: W1121 13:53:58.997860 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb7e7c40c_9247_4c76_bbf9_d6adc9849eab.slice/crio-946b3ede7e430c04dff77670de5bde17ec473cc16bf61adb8a8ea5538cdd92ae WatchSource:0}: Error finding container 946b3ede7e430c04dff77670de5bde17ec473cc16bf61adb8a8ea5538cdd92ae: Status 404 returned error can't find the container with id 946b3ede7e430c04dff77670de5bde17ec473cc16bf61adb8a8ea5538cdd92ae Nov 21 13:53:59 crc kubenswrapper[4721]: E1121 13:53:59.006143 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:10452e2144368e2f128c8fb8ef9e54880b06ef1d71d9f084a0217dcb099c51ce" Nov 21 13:53:59 crc kubenswrapper[4721]: E1121 13:53:59.006372 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:10452e2144368e2f128c8fb8ef9e54880b06ef1d71d9f084a0217dcb099c51ce,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4shgj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-1_horizon-kuttl-tests(75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 21 13:53:59 crc kubenswrapper[4721]: E1121 13:53:59.007616 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="horizon-kuttl-tests/openstack-galera-1" podUID="75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1" Nov 21 13:53:59 crc kubenswrapper[4721]: E1121 13:53:59.307628 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:10452e2144368e2f128c8fb8ef9e54880b06ef1d71d9f084a0217dcb099c51ce" Nov 21 13:53:59 crc kubenswrapper[4721]: E1121 13:53:59.307830 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:10452e2144368e2f128c8fb8ef9e54880b06ef1d71d9f084a0217dcb099c51ce,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5ghds,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_horizon-kuttl-tests(d3b77c54-c410-4bfc-867b-3ba283ab4d98): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 21 13:53:59 crc kubenswrapper[4721]: E1121 13:53:59.309287 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="horizon-kuttl-tests/openstack-galera-0" podUID="d3b77c54-c410-4bfc-867b-3ba283ab4d98" Nov 21 13:53:59 crc kubenswrapper[4721]: I1121 13:53:59.391747 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/memcached-0"] Nov 21 13:53:59 crc kubenswrapper[4721]: W1121 13:53:59.394890 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podffed5669_7436_4a05_a10b_8decf1eee90c.slice/crio-bb83947b3bfb0b02cb231f13ad8a0f171bf3e6a42c9f9a199908814b35d378cb WatchSource:0}: Error finding container bb83947b3bfb0b02cb231f13ad8a0f171bf3e6a42c9f9a199908814b35d378cb: Status 404 returned error can't find the container with id bb83947b3bfb0b02cb231f13ad8a0f171bf3e6a42c9f9a199908814b35d378cb Nov 21 13:53:59 crc kubenswrapper[4721]: I1121 13:53:59.535872 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-fsccc" event={"ID":"b7e7c40c-9247-4c76-bbf9-d6adc9849eab","Type":"ContainerStarted","Data":"946b3ede7e430c04dff77670de5bde17ec473cc16bf61adb8a8ea5538cdd92ae"} Nov 21 13:53:59 crc kubenswrapper[4721]: I1121 
13:53:59.536806 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/memcached-0" event={"ID":"ffed5669-7436-4a05-a10b-8decf1eee90c","Type":"ContainerStarted","Data":"bb83947b3bfb0b02cb231f13ad8a0f171bf3e6a42c9f9a199908814b35d378cb"} Nov 21 13:53:59 crc kubenswrapper[4721]: E1121 13:53:59.538223 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:10452e2144368e2f128c8fb8ef9e54880b06ef1d71d9f084a0217dcb099c51ce\\\"\"" pod="horizon-kuttl-tests/openstack-galera-0" podUID="d3b77c54-c410-4bfc-867b-3ba283ab4d98" Nov 21 13:53:59 crc kubenswrapper[4721]: E1121 13:53:59.538295 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:10452e2144368e2f128c8fb8ef9e54880b06ef1d71d9f084a0217dcb099c51ce\\\"\"" pod="horizon-kuttl-tests/openstack-galera-1" podUID="75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1" Nov 21 13:54:40 crc kubenswrapper[4721]: E1121 13:54:40.157925 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:10452e2144368e2f128c8fb8ef9e54880b06ef1d71d9f084a0217dcb099c51ce" Nov 21 13:54:40 crc kubenswrapper[4721]: E1121 13:54:40.158750 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:10452e2144368e2f128c8fb8ef9e54880b06ef1d71d9f084a0217dcb099c51ce,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4shgj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-1_horizon-kuttl-tests(75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 21 13:54:40 crc kubenswrapper[4721]: E1121 13:54:40.160093 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="horizon-kuttl-tests/openstack-galera-1" podUID="75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1" Nov 21 13:54:42 crc kubenswrapper[4721]: I1121 13:54:42.830825 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-0" event={"ID":"d3b77c54-c410-4bfc-867b-3ba283ab4d98","Type":"ContainerStarted","Data":"d637727a1989033b421e858ec711ed3fac4c066c6eb7724d5d84a51af2f1217c"} Nov 21 13:54:43 crc kubenswrapper[4721]: I1121 13:54:43.849468 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-2" event={"ID":"29905f76-0ed3-4b86-b2a8-5b9da17c225a","Type":"ContainerStarted","Data":"50963c5eb0f0c1a814b15f477250dcff59816dba932bb5db4d687756ab548c46"} Nov 21 13:54:45 crc kubenswrapper[4721]: E1121 13:54:45.995948 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached@sha256:70f96e30689dd93f63cb6975749846584466720948c3a588284911cdef7e2b3b" Nov 21 13:54:45 crc kubenswrapper[4721]: E1121 13:54:45.996727 4721 kuberuntime_manager.go:1274] "Unhandled Error" 
err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached@sha256:70f96e30689dd93f63cb6975749846584466720948c3a588284911cdef7e2b3b,Command:[/usr/bin/dumb-init -- /usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:nd6h9dh5dfh5fhdch94h54fh59fh74h679h5dbh665h67h68hfch54fh5ffhchcfh5f4hbbh679h5cch654h57fh644h59ch66fh586h596h6ch54cq,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6mqmk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_horizon-kuttl-tests(ffed5669-7436-4a05-a10b-8decf1eee90c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 21 13:54:45 crc kubenswrapper[4721]: E1121 13:54:45.997960 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="horizon-kuttl-tests/memcached-0" podUID="ffed5669-7436-4a05-a10b-8decf1eee90c" Nov 21 13:54:46 crc kubenswrapper[4721]: E1121 13:54:46.872167 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached@sha256:70f96e30689dd93f63cb6975749846584466720948c3a588284911cdef7e2b3b\\\"\"" pod="horizon-kuttl-tests/memcached-0" 
podUID="ffed5669-7436-4a05-a10b-8decf1eee90c" Nov 21 13:54:59 crc kubenswrapper[4721]: I1121 13:54:59.985335 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 13:55:05 crc kubenswrapper[4721]: I1121 13:55:05.262599 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 13:55:05 crc kubenswrapper[4721]: I1121 13:55:05.263162 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 13:55:06 crc kubenswrapper[4721]: E1121 13:55:06.448797 4721 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest" Nov 21 13:55:06 crc kubenswrapper[4721]: E1121 13:55:06.449342 4721 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-j2x7s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-index-fsccc_openstack-operators(b7e7c40c-9247-4c76-bbf9-d6adc9849eab): ErrImagePull: rpc error: code = Canceled 
desc = copying layer: context canceled" logger="UnhandledError" Nov 21 13:55:06 crc kubenswrapper[4721]: E1121 13:55:06.450602 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-index-fsccc" podUID="b7e7c40c-9247-4c76-bbf9-d6adc9849eab" Nov 21 13:55:07 crc kubenswrapper[4721]: I1121 13:55:07.207482 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-1" event={"ID":"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1","Type":"ContainerStarted","Data":"6eb5a95cd1481f9d921ca1aed939f9fd6cb9ca61ab9c7484cb06e14431988b60"} Nov 21 13:55:07 crc kubenswrapper[4721]: I1121 13:55:07.209338 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/memcached-0" event={"ID":"ffed5669-7436-4a05-a10b-8decf1eee90c","Type":"ContainerStarted","Data":"2929acf0f7109d141dedf1b885efe140566f07e351a9ae2c6309552efd93159e"} Nov 21 13:55:07 crc kubenswrapper[4721]: I1121 13:55:07.209454 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/memcached-0" Nov 21 13:55:07 crc kubenswrapper[4721]: E1121 13:55:07.210358 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator-index:latest\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-index-fsccc" podUID="b7e7c40c-9247-4c76-bbf9-d6adc9849eab" Nov 21 13:55:07 crc kubenswrapper[4721]: I1121 13:55:07.270705 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/memcached-0" podStartSLOduration=1.779198635 podStartE2EDuration="1m9.270686499s" podCreationTimestamp="2025-11-21 13:53:58 +0000 UTC" firstStartedPulling="2025-11-21 13:53:59.397942887 +0000 UTC m=+1077.375834145" lastFinishedPulling="2025-11-21 13:55:06.889430751 +0000 UTC m=+1144.867322009" observedRunningTime="2025-11-21 13:55:07.267135916 +0000 UTC m=+1145.245027174" watchObservedRunningTime="2025-11-21 13:55:07.270686499 +0000 UTC m=+1145.248577777" Nov 21 13:55:13 crc kubenswrapper[4721]: I1121 13:55:13.250163 4721 generic.go:334] "Generic (PLEG): container finished" podID="29905f76-0ed3-4b86-b2a8-5b9da17c225a" containerID="50963c5eb0f0c1a814b15f477250dcff59816dba932bb5db4d687756ab548c46" exitCode=0 Nov 21 13:55:13 crc kubenswrapper[4721]: I1121 13:55:13.250249 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-2" event={"ID":"29905f76-0ed3-4b86-b2a8-5b9da17c225a","Type":"ContainerDied","Data":"50963c5eb0f0c1a814b15f477250dcff59816dba932bb5db4d687756ab548c46"} Nov 21 13:55:13 crc kubenswrapper[4721]: I1121 13:55:13.253345 4721 generic.go:334] "Generic (PLEG): container finished" podID="d3b77c54-c410-4bfc-867b-3ba283ab4d98" containerID="d637727a1989033b421e858ec711ed3fac4c066c6eb7724d5d84a51af2f1217c" exitCode=0 Nov 21 13:55:13 crc kubenswrapper[4721]: I1121 13:55:13.253389 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-0" event={"ID":"d3b77c54-c410-4bfc-867b-3ba283ab4d98","Type":"ContainerDied","Data":"d637727a1989033b421e858ec711ed3fac4c066c6eb7724d5d84a51af2f1217c"} Nov 21 13:55:13 crc kubenswrapper[4721]: I1121 13:55:13.932028 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="horizon-kuttl-tests/memcached-0" Nov 21 13:55:14 crc kubenswrapper[4721]: I1121 13:55:14.263253 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-2" event={"ID":"29905f76-0ed3-4b86-b2a8-5b9da17c225a","Type":"ContainerStarted","Data":"2fd0dad345c7ce03ed59b93c5c5e385847e8637c391f6e223aa2128354c9b34c"} Nov 21 13:55:14 crc kubenswrapper[4721]: I1121 13:55:14.265096 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-0" event={"ID":"d3b77c54-c410-4bfc-867b-3ba283ab4d98","Type":"ContainerStarted","Data":"110e3a3ce0d2460c88f8fff0a170ec0d4f766c69e8de4c0ae18371ba57f8d27c"} Nov 21 13:55:14 crc kubenswrapper[4721]: I1121 13:55:14.266759 4721 generic.go:334] "Generic (PLEG): container finished" podID="75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1" containerID="6eb5a95cd1481f9d921ca1aed939f9fd6cb9ca61ab9c7484cb06e14431988b60" exitCode=0 Nov 21 13:55:14 crc kubenswrapper[4721]: I1121 13:55:14.266795 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-1" event={"ID":"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1","Type":"ContainerDied","Data":"6eb5a95cd1481f9d921ca1aed939f9fd6cb9ca61ab9c7484cb06e14431988b60"} Nov 21 13:55:14 crc kubenswrapper[4721]: I1121 13:55:14.285212 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/openstack-galera-2" podStartSLOduration=36.184637072 podStartE2EDuration="1m31.285191434s" podCreationTimestamp="2025-11-21 13:53:43 +0000 UTC" firstStartedPulling="2025-11-21 13:53:45.057620171 +0000 UTC m=+1063.035511429" lastFinishedPulling="2025-11-21 13:54:40.158174503 +0000 UTC m=+1118.136065791" observedRunningTime="2025-11-21 13:55:14.283811588 +0000 UTC m=+1152.261702856" watchObservedRunningTime="2025-11-21 13:55:14.285191434 +0000 UTC m=+1152.263082712" Nov 21 13:55:14 crc kubenswrapper[4721]: I1121 13:55:14.313560 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/openstack-galera-0" podStartSLOduration=-9223371945.541239 podStartE2EDuration="1m31.313536932s" podCreationTimestamp="2025-11-21 13:53:43 +0000 UTC" firstStartedPulling="2025-11-21 13:53:45.060362203 +0000 UTC m=+1063.038253461" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:55:14.312816664 +0000 UTC m=+1152.290707932" watchObservedRunningTime="2025-11-21 13:55:14.313536932 +0000 UTC m=+1152.291428210" Nov 21 13:55:14 crc kubenswrapper[4721]: I1121 13:55:14.610137 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:55:14 crc kubenswrapper[4721]: I1121 13:55:14.610191 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:55:14 crc kubenswrapper[4721]: I1121 13:55:14.616035 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:55:14 crc kubenswrapper[4721]: I1121 13:55:14.616254 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:55:15 crc kubenswrapper[4721]: I1121 13:55:15.273995 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-1" event={"ID":"75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1","Type":"ContainerStarted","Data":"c1a71f520c59c69524fb843aa36b2a34b26a1b300ce9b71520e4e091b2e50396"} Nov 21 13:55:15 crc kubenswrapper[4721]: 
I1121 13:55:15.305425 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/openstack-galera-1" podStartSLOduration=-9223371944.549368 podStartE2EDuration="1m32.305407998s" podCreationTimestamp="2025-11-21 13:53:43 +0000 UTC" firstStartedPulling="2025-11-21 13:53:44.925507035 +0000 UTC m=+1062.903398303" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:55:15.300425808 +0000 UTC m=+1153.278317106" watchObservedRunningTime="2025-11-21 13:55:15.305407998 +0000 UTC m=+1153.283299266" Nov 21 13:55:24 crc kubenswrapper[4721]: I1121 13:55:24.626213 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:55:24 crc kubenswrapper[4721]: I1121 13:55:24.627247 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:55:25 crc kubenswrapper[4721]: I1121 13:55:25.252652 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:55:25 crc kubenswrapper[4721]: I1121 13:55:25.331528 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/openstack-galera-2" Nov 21 13:55:25 crc kubenswrapper[4721]: I1121 13:55:25.338938 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-fsccc" event={"ID":"b7e7c40c-9247-4c76-bbf9-d6adc9849eab","Type":"ContainerStarted","Data":"71b540b77da6fbdda17b368d12e5d0f2f8d47c32f18a5ab7d4ac29171ecc7475"} Nov 21 13:55:25 crc kubenswrapper[4721]: I1121 13:55:25.369528 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-fsccc" podStartSLOduration=8.647535611 podStartE2EDuration="1m34.369509678s" podCreationTimestamp="2025-11-21 13:53:51 +0000 UTC" firstStartedPulling="2025-11-21 13:53:59.002571807 +0000 UTC m=+1076.980463105" lastFinishedPulling="2025-11-21 13:55:24.724545914 +0000 UTC m=+1162.702437172" observedRunningTime="2025-11-21 13:55:25.365018391 +0000 UTC m=+1163.342909649" watchObservedRunningTime="2025-11-21 13:55:25.369509678 +0000 UTC m=+1163.347400936" Nov 21 13:55:25 crc kubenswrapper[4721]: E1121 13:55:25.486618 4721 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.198:58764->38.102.83.198:37869: write tcp 38.102.83.198:58764->38.102.83.198:37869: write: broken pipe Nov 21 13:55:31 crc kubenswrapper[4721]: I1121 13:55:31.447371 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/rabbitmq-cluster-operator-index-fsccc" Nov 21 13:55:31 crc kubenswrapper[4721]: I1121 13:55:31.448625 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/rabbitmq-cluster-operator-index-fsccc" Nov 21 13:55:31 crc kubenswrapper[4721]: I1121 13:55:31.477624 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/rabbitmq-cluster-operator-index-fsccc" Nov 21 13:55:32 crc kubenswrapper[4721]: I1121 13:55:32.446717 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/rabbitmq-cluster-operator-index-fsccc" Nov 21 13:55:34 crc kubenswrapper[4721]: I1121 13:55:34.675886 4721 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/openstack-galera-2" podUID="29905f76-0ed3-4b86-b2a8-5b9da17c225a" 
containerName="galera" probeResult="failure" output=< Nov 21 13:55:34 crc kubenswrapper[4721]: wsrep_local_state_comment (Donor/Desynced) differs from Synced Nov 21 13:55:34 crc kubenswrapper[4721]: > Nov 21 13:55:35 crc kubenswrapper[4721]: I1121 13:55:35.262648 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 13:55:35 crc kubenswrapper[4721]: I1121 13:55:35.262742 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 13:55:36 crc kubenswrapper[4721]: I1121 13:55:36.073887 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:55:36 crc kubenswrapper[4721]: I1121 13:55:36.146569 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/openstack-galera-0" Nov 21 13:55:40 crc kubenswrapper[4721]: I1121 13:55:40.719504 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:55:40 crc kubenswrapper[4721]: I1121 13:55:40.785353 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/openstack-galera-1" Nov 21 13:55:41 crc kubenswrapper[4721]: I1121 13:55:41.440157 4721 generic.go:334] "Generic (PLEG): container finished" podID="18246f53-8cd9-4eae-85d8-d7543af4d397" containerID="519003276f690ecf67828e34fc38d2ff72bf3a6ed5db3091b1eafe47f6fbdf18" exitCode=1 Nov 21 13:55:41 crc kubenswrapper[4721]: I1121 13:55:41.440270 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" event={"ID":"18246f53-8cd9-4eae-85d8-d7543af4d397","Type":"ContainerDied","Data":"519003276f690ecf67828e34fc38d2ff72bf3a6ed5db3091b1eafe47f6fbdf18"} Nov 21 13:55:41 crc kubenswrapper[4721]: I1121 13:55:41.441056 4721 scope.go:117] "RemoveContainer" containerID="519003276f690ecf67828e34fc38d2ff72bf3a6ed5db3091b1eafe47f6fbdf18" Nov 21 13:55:42 crc kubenswrapper[4721]: I1121 13:55:42.452098 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" event={"ID":"18246f53-8cd9-4eae-85d8-d7543af4d397","Type":"ContainerStarted","Data":"ac7e9cb3b3a9b8638180a16e81401337b4eb25442237aec6538052050c49102b"} Nov 21 13:55:42 crc kubenswrapper[4721]: I1121 13:55:42.452678 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" Nov 21 13:55:45 crc kubenswrapper[4721]: I1121 13:55:45.149013 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm"] Nov 21 13:55:45 crc kubenswrapper[4721]: I1121 13:55:45.153843 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm" Nov 21 13:55:45 crc kubenswrapper[4721]: I1121 13:55:45.156258 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm"] Nov 21 13:55:45 crc kubenswrapper[4721]: I1121 13:55:45.157132 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-tw86c" Nov 21 13:55:45 crc kubenswrapper[4721]: I1121 13:55:45.279195 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0fcc1f5e-b89d-488a-847d-820b59e1c53f-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm\" (UID: \"0fcc1f5e-b89d-488a-847d-820b59e1c53f\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm" Nov 21 13:55:45 crc kubenswrapper[4721]: I1121 13:55:45.279538 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mdmw\" (UniqueName: \"kubernetes.io/projected/0fcc1f5e-b89d-488a-847d-820b59e1c53f-kube-api-access-6mdmw\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm\" (UID: \"0fcc1f5e-b89d-488a-847d-820b59e1c53f\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm" Nov 21 13:55:45 crc kubenswrapper[4721]: I1121 13:55:45.279631 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0fcc1f5e-b89d-488a-847d-820b59e1c53f-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm\" (UID: \"0fcc1f5e-b89d-488a-847d-820b59e1c53f\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm" Nov 21 13:55:45 crc kubenswrapper[4721]: I1121 13:55:45.381889 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mdmw\" (UniqueName: \"kubernetes.io/projected/0fcc1f5e-b89d-488a-847d-820b59e1c53f-kube-api-access-6mdmw\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm\" (UID: \"0fcc1f5e-b89d-488a-847d-820b59e1c53f\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm" Nov 21 13:55:45 crc kubenswrapper[4721]: I1121 13:55:45.382005 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0fcc1f5e-b89d-488a-847d-820b59e1c53f-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm\" (UID: \"0fcc1f5e-b89d-488a-847d-820b59e1c53f\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm" Nov 21 13:55:45 crc kubenswrapper[4721]: I1121 13:55:45.382053 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0fcc1f5e-b89d-488a-847d-820b59e1c53f-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm\" (UID: \"0fcc1f5e-b89d-488a-847d-820b59e1c53f\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm" Nov 21 13:55:45 crc kubenswrapper[4721]: I1121 13:55:45.382999 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/0fcc1f5e-b89d-488a-847d-820b59e1c53f-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm\" (UID: \"0fcc1f5e-b89d-488a-847d-820b59e1c53f\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm" Nov 21 13:55:45 crc kubenswrapper[4721]: I1121 13:55:45.383006 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0fcc1f5e-b89d-488a-847d-820b59e1c53f-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm\" (UID: \"0fcc1f5e-b89d-488a-847d-820b59e1c53f\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm" Nov 21 13:55:45 crc kubenswrapper[4721]: I1121 13:55:45.405119 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mdmw\" (UniqueName: \"kubernetes.io/projected/0fcc1f5e-b89d-488a-847d-820b59e1c53f-kube-api-access-6mdmw\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm\" (UID: \"0fcc1f5e-b89d-488a-847d-820b59e1c53f\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm" Nov 21 13:55:45 crc kubenswrapper[4721]: I1121 13:55:45.477923 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm" Nov 21 13:55:45 crc kubenswrapper[4721]: I1121 13:55:45.721280 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm"] Nov 21 13:55:46 crc kubenswrapper[4721]: I1121 13:55:46.478688 4721 generic.go:334] "Generic (PLEG): container finished" podID="0fcc1f5e-b89d-488a-847d-820b59e1c53f" containerID="a7c8f1da46f756626fc3be89f0c366b71987a2b471a6405634807a031502ebdc" exitCode=0 Nov 21 13:55:46 crc kubenswrapper[4721]: I1121 13:55:46.478768 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm" event={"ID":"0fcc1f5e-b89d-488a-847d-820b59e1c53f","Type":"ContainerDied","Data":"a7c8f1da46f756626fc3be89f0c366b71987a2b471a6405634807a031502ebdc"} Nov 21 13:55:46 crc kubenswrapper[4721]: I1121 13:55:46.478960 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm" event={"ID":"0fcc1f5e-b89d-488a-847d-820b59e1c53f","Type":"ContainerStarted","Data":"bf31e3576633980c25bcbdde3a0cd7dbc657c8ee824be6ad23a30a4c33f90798"} Nov 21 13:55:47 crc kubenswrapper[4721]: I1121 13:55:47.586032 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-87d79d4dc-76n7r" Nov 21 13:55:48 crc kubenswrapper[4721]: I1121 13:55:48.499867 4721 generic.go:334] "Generic (PLEG): container finished" podID="0fcc1f5e-b89d-488a-847d-820b59e1c53f" containerID="873074cc13c80cf4e33d7770805ce6ebd5ff5af3a18606f1d3641d1964b10a7d" exitCode=0 Nov 21 13:55:48 crc kubenswrapper[4721]: I1121 13:55:48.499964 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm" event={"ID":"0fcc1f5e-b89d-488a-847d-820b59e1c53f","Type":"ContainerDied","Data":"873074cc13c80cf4e33d7770805ce6ebd5ff5af3a18606f1d3641d1964b10a7d"} Nov 21 13:55:48 crc kubenswrapper[4721]: E1121 13:55:48.924555 4721 cadvisor_stats_provider.go:516] "Partial failure 
issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0fcc1f5e_b89d_488a_847d_820b59e1c53f.slice/crio-conmon-8c0769ec52728dc522e860eef563f86ee78fd15329c05902d4c9ba040d44f727.scope\": RecentStats: unable to find data in memory cache]"
Nov 21 13:55:49 crc kubenswrapper[4721]: I1121 13:55:49.510212 4721 generic.go:334] "Generic (PLEG): container finished" podID="0fcc1f5e-b89d-488a-847d-820b59e1c53f" containerID="8c0769ec52728dc522e860eef563f86ee78fd15329c05902d4c9ba040d44f727" exitCode=0
Nov 21 13:55:49 crc kubenswrapper[4721]: I1121 13:55:49.510287 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm" event={"ID":"0fcc1f5e-b89d-488a-847d-820b59e1c53f","Type":"ContainerDied","Data":"8c0769ec52728dc522e860eef563f86ee78fd15329c05902d4c9ba040d44f727"}
Nov 21 13:55:50 crc kubenswrapper[4721]: I1121 13:55:50.812259 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm"
Nov 21 13:55:50 crc kubenswrapper[4721]: I1121 13:55:50.864585 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mdmw\" (UniqueName: \"kubernetes.io/projected/0fcc1f5e-b89d-488a-847d-820b59e1c53f-kube-api-access-6mdmw\") pod \"0fcc1f5e-b89d-488a-847d-820b59e1c53f\" (UID: \"0fcc1f5e-b89d-488a-847d-820b59e1c53f\") "
Nov 21 13:55:50 crc kubenswrapper[4721]: I1121 13:55:50.864689 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0fcc1f5e-b89d-488a-847d-820b59e1c53f-util\") pod \"0fcc1f5e-b89d-488a-847d-820b59e1c53f\" (UID: \"0fcc1f5e-b89d-488a-847d-820b59e1c53f\") "
Nov 21 13:55:50 crc kubenswrapper[4721]: I1121 13:55:50.864727 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0fcc1f5e-b89d-488a-847d-820b59e1c53f-bundle\") pod \"0fcc1f5e-b89d-488a-847d-820b59e1c53f\" (UID: \"0fcc1f5e-b89d-488a-847d-820b59e1c53f\") "
Nov 21 13:55:50 crc kubenswrapper[4721]: I1121 13:55:50.865648 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fcc1f5e-b89d-488a-847d-820b59e1c53f-bundle" (OuterVolumeSpecName: "bundle") pod "0fcc1f5e-b89d-488a-847d-820b59e1c53f" (UID: "0fcc1f5e-b89d-488a-847d-820b59e1c53f"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 13:55:50 crc kubenswrapper[4721]: I1121 13:55:50.871946 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fcc1f5e-b89d-488a-847d-820b59e1c53f-kube-api-access-6mdmw" (OuterVolumeSpecName: "kube-api-access-6mdmw") pod "0fcc1f5e-b89d-488a-847d-820b59e1c53f" (UID: "0fcc1f5e-b89d-488a-847d-820b59e1c53f"). InnerVolumeSpecName "kube-api-access-6mdmw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 13:55:50 crc kubenswrapper[4721]: I1121 13:55:50.879380 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fcc1f5e-b89d-488a-847d-820b59e1c53f-util" (OuterVolumeSpecName: "util") pod "0fcc1f5e-b89d-488a-847d-820b59e1c53f" (UID: "0fcc1f5e-b89d-488a-847d-820b59e1c53f"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 13:55:50 crc kubenswrapper[4721]: I1121 13:55:50.966615 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mdmw\" (UniqueName: \"kubernetes.io/projected/0fcc1f5e-b89d-488a-847d-820b59e1c53f-kube-api-access-6mdmw\") on node \"crc\" DevicePath \"\""
Nov 21 13:55:50 crc kubenswrapper[4721]: I1121 13:55:50.966650 4721 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0fcc1f5e-b89d-488a-847d-820b59e1c53f-util\") on node \"crc\" DevicePath \"\""
Nov 21 13:55:50 crc kubenswrapper[4721]: I1121 13:55:50.966659 4721 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0fcc1f5e-b89d-488a-847d-820b59e1c53f-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 13:55:51 crc kubenswrapper[4721]: I1121 13:55:51.529565 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm" event={"ID":"0fcc1f5e-b89d-488a-847d-820b59e1c53f","Type":"ContainerDied","Data":"bf31e3576633980c25bcbdde3a0cd7dbc657c8ee824be6ad23a30a4c33f90798"}
Nov 21 13:55:51 crc kubenswrapper[4721]: I1121 13:55:51.529647 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bf31e3576633980c25bcbdde3a0cd7dbc657c8ee824be6ad23a30a4c33f90798"
Nov 21 13:55:51 crc kubenswrapper[4721]: I1121 13:55:51.529758 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm"
Nov 21 13:55:58 crc kubenswrapper[4721]: I1121 13:55:58.789279 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-j7fth"]
Nov 21 13:55:58 crc kubenswrapper[4721]: E1121 13:55:58.790168 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fcc1f5e-b89d-488a-847d-820b59e1c53f" containerName="util"
Nov 21 13:55:58 crc kubenswrapper[4721]: I1121 13:55:58.790185 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fcc1f5e-b89d-488a-847d-820b59e1c53f" containerName="util"
Nov 21 13:55:58 crc kubenswrapper[4721]: E1121 13:55:58.790201 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fcc1f5e-b89d-488a-847d-820b59e1c53f" containerName="extract"
Nov 21 13:55:58 crc kubenswrapper[4721]: I1121 13:55:58.790208 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fcc1f5e-b89d-488a-847d-820b59e1c53f" containerName="extract"
Nov 21 13:55:58 crc kubenswrapper[4721]: E1121 13:55:58.790220 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fcc1f5e-b89d-488a-847d-820b59e1c53f" containerName="pull"
Nov 21 13:55:58 crc kubenswrapper[4721]: I1121 13:55:58.790227 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fcc1f5e-b89d-488a-847d-820b59e1c53f" containerName="pull"
Nov 21 13:55:58 crc kubenswrapper[4721]: I1121 13:55:58.790384 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fcc1f5e-b89d-488a-847d-820b59e1c53f" containerName="extract"
Nov 21 13:55:58 crc kubenswrapper[4721]: I1121 13:55:58.790962 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-j7fth"
Nov 21 13:55:58 crc kubenswrapper[4721]: I1121 13:55:58.797148 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-dockercfg-9p6hb"
Nov 21 13:55:58 crc kubenswrapper[4721]: I1121 13:55:58.840859 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-j7fth"]
Nov 21 13:55:58 crc kubenswrapper[4721]: I1121 13:55:58.882804 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8s4x\" (UniqueName: \"kubernetes.io/projected/c76b426d-f716-45db-bb4d-75d15156d323-kube-api-access-l8s4x\") pod \"rabbitmq-cluster-operator-779fc9694b-j7fth\" (UID: \"c76b426d-f716-45db-bb4d-75d15156d323\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-j7fth"
Nov 21 13:55:58 crc kubenswrapper[4721]: I1121 13:55:58.984542 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8s4x\" (UniqueName: \"kubernetes.io/projected/c76b426d-f716-45db-bb4d-75d15156d323-kube-api-access-l8s4x\") pod \"rabbitmq-cluster-operator-779fc9694b-j7fth\" (UID: \"c76b426d-f716-45db-bb4d-75d15156d323\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-j7fth"
Nov 21 13:55:59 crc kubenswrapper[4721]: I1121 13:55:59.012176 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8s4x\" (UniqueName: \"kubernetes.io/projected/c76b426d-f716-45db-bb4d-75d15156d323-kube-api-access-l8s4x\") pod \"rabbitmq-cluster-operator-779fc9694b-j7fth\" (UID: \"c76b426d-f716-45db-bb4d-75d15156d323\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-j7fth"
Nov 21 13:55:59 crc kubenswrapper[4721]: I1121 13:55:59.121113 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-j7fth"
Nov 21 13:55:59 crc kubenswrapper[4721]: I1121 13:55:59.375826 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-j7fth"]
Nov 21 13:55:59 crc kubenswrapper[4721]: I1121 13:55:59.584125 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-j7fth" event={"ID":"c76b426d-f716-45db-bb4d-75d15156d323","Type":"ContainerStarted","Data":"b01b0f73f76f6c611c6175074c656079eeb0c09517735210173d55555dd21568"}
Nov 21 13:56:05 crc kubenswrapper[4721]: I1121 13:56:05.261911 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 21 13:56:05 crc kubenswrapper[4721]: I1121 13:56:05.262535 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 21 13:56:05 crc kubenswrapper[4721]: I1121 13:56:05.262598 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mstbk"
Nov 21 13:56:05 crc kubenswrapper[4721]: I1121 13:56:05.263475 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"35f8b67009daee90d3db515f042a09a64d87fb15b884ad370362a42ff402804a"} pod="openshift-machine-config-operator/machine-config-daemon-mstbk" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 21 13:56:05 crc kubenswrapper[4721]: I1121 13:56:05.263553 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" containerID="cri-o://35f8b67009daee90d3db515f042a09a64d87fb15b884ad370362a42ff402804a" gracePeriod=600
Nov 21 13:56:05 crc kubenswrapper[4721]: I1121 13:56:05.624125 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-j7fth" event={"ID":"c76b426d-f716-45db-bb4d-75d15156d323","Type":"ContainerStarted","Data":"b43d24763c73c1f2dfb3223c4a4810e58929aa527622a270b995a407041f9591"}
Nov 21 13:56:05 crc kubenswrapper[4721]: I1121 13:56:05.630184 4721 generic.go:334] "Generic (PLEG): container finished" podID="fe3943c5-1349-4275-bc45-25db418c7488" containerID="35f8b67009daee90d3db515f042a09a64d87fb15b884ad370362a42ff402804a" exitCode=0
Nov 21 13:56:05 crc kubenswrapper[4721]: I1121 13:56:05.630229 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" event={"ID":"fe3943c5-1349-4275-bc45-25db418c7488","Type":"ContainerDied","Data":"35f8b67009daee90d3db515f042a09a64d87fb15b884ad370362a42ff402804a"}
Nov 21 13:56:05 crc kubenswrapper[4721]: I1121 13:56:05.630268 4721 scope.go:117] "RemoveContainer" containerID="71a86cc2f09fdb3301718c24b412995cd6f1788d273d3c5f0145c7b6b096533f"
Nov 21 13:56:05 crc kubenswrapper[4721]: I1121 13:56:05.642113 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-j7fth" podStartSLOduration=1.896634655 podStartE2EDuration="7.642095591s" podCreationTimestamp="2025-11-21 13:55:58 +0000 UTC" firstStartedPulling="2025-11-21 13:55:59.403544233 +0000 UTC m=+1197.381435491" lastFinishedPulling="2025-11-21 13:56:05.149005149 +0000 UTC m=+1203.126896427" observedRunningTime="2025-11-21 13:56:05.638866825 +0000 UTC m=+1203.616758113" watchObservedRunningTime="2025-11-21 13:56:05.642095591 +0000 UTC m=+1203.619986849"
Nov 21 13:56:06 crc kubenswrapper[4721]: I1121 13:56:06.638854 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" event={"ID":"fe3943c5-1349-4275-bc45-25db418c7488","Type":"ContainerStarted","Data":"a1c015fb42138002e1bbf6657d93a4f584890b9fe82b0b256353c923999d7d76"}
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.346538 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/rabbitmq-server-0"]
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.347920 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.350850 4721 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"rabbitmq-erlang-cookie"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.351135 4721 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"rabbitmq-default-user"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.351335 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"rabbitmq-server-conf"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.351476 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"rabbitmq-plugins-conf"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.352925 4721 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"rabbitmq-server-dockercfg-pwqv7"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.367228 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/rabbitmq-server-0"]
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.392151 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-34b6ba62-59a9-4f55-8abb-e76aaeaa5642\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-34b6ba62-59a9-4f55-8abb-e76aaeaa5642\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.392203 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.392234 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.392248 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.392291 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.392313 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.392329 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2tm9\" (UniqueName: \"kubernetes.io/projected/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-kube-api-access-c2tm9\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.392367 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.493411 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.493452 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.493474 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2tm9\" (UniqueName: \"kubernetes.io/projected/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-kube-api-access-c2tm9\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.493513 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.493534 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-34b6ba62-59a9-4f55-8abb-e76aaeaa5642\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-34b6ba62-59a9-4f55-8abb-e76aaeaa5642\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.493551 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.493587 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.493601 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.494439 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.495003 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.495015 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.500158 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.500248 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.501284 4721 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.501324 4721 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-34b6ba62-59a9-4f55-8abb-e76aaeaa5642\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-34b6ba62-59a9-4f55-8abb-e76aaeaa5642\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/cb0cfe9a5e64ee3a44eb00fc148444441efb33a51c27555140fd09593c5bd400/globalmount\"" pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.511000 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.524692 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-34b6ba62-59a9-4f55-8abb-e76aaeaa5642\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-34b6ba62-59a9-4f55-8abb-e76aaeaa5642\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.527181 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2tm9\" (UniqueName: \"kubernetes.io/projected/73ce16fc-de75-4ab2-a732-36a9b1c14c3a-kube-api-access-c2tm9\") pod \"rabbitmq-server-0\" (UID: \"73ce16fc-de75-4ab2-a732-36a9b1c14c3a\") " pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.668761 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:12 crc kubenswrapper[4721]: I1121 13:56:12.921289 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/rabbitmq-server-0"]
Nov 21 13:56:13 crc kubenswrapper[4721]: I1121 13:56:13.682233 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/rabbitmq-server-0" event={"ID":"73ce16fc-de75-4ab2-a732-36a9b1c14c3a","Type":"ContainerStarted","Data":"f389783b51d4b6d86e704f7d157f437b3f453b0b4fcecc1795b64a9523003d6c"}
Nov 21 13:56:22 crc kubenswrapper[4721]: I1121 13:56:22.748617 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/rabbitmq-server-0" event={"ID":"73ce16fc-de75-4ab2-a732-36a9b1c14c3a","Type":"ContainerStarted","Data":"a345a8dffec912f56a0cb85737202e8bd53f8b17d9f8ae41f83f10798de6918d"}
Nov 21 13:56:24 crc kubenswrapper[4721]: I1121 13:56:24.696253 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-index-4pj9l"]
Nov 21 13:56:24 crc kubenswrapper[4721]: I1121 13:56:24.697411 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-4pj9l"
Nov 21 13:56:24 crc kubenswrapper[4721]: I1121 13:56:24.700315 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-index-dockercfg-g9x4g"
Nov 21 13:56:24 crc kubenswrapper[4721]: I1121 13:56:24.710377 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-4pj9l"]
Nov 21 13:56:24 crc kubenswrapper[4721]: I1121 13:56:24.780689 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4brwg\" (UniqueName: \"kubernetes.io/projected/aa287363-9dcc-402b-ba8f-032187456d2c-kube-api-access-4brwg\") pod \"keystone-operator-index-4pj9l\" (UID: \"aa287363-9dcc-402b-ba8f-032187456d2c\") " pod="openstack-operators/keystone-operator-index-4pj9l"
Nov 21 13:56:24 crc kubenswrapper[4721]: I1121 13:56:24.881686 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4brwg\" (UniqueName: \"kubernetes.io/projected/aa287363-9dcc-402b-ba8f-032187456d2c-kube-api-access-4brwg\") pod \"keystone-operator-index-4pj9l\" (UID: \"aa287363-9dcc-402b-ba8f-032187456d2c\") " pod="openstack-operators/keystone-operator-index-4pj9l"
Nov 21 13:56:24 crc kubenswrapper[4721]: I1121 13:56:24.902188 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4brwg\" (UniqueName: \"kubernetes.io/projected/aa287363-9dcc-402b-ba8f-032187456d2c-kube-api-access-4brwg\") pod \"keystone-operator-index-4pj9l\" (UID: \"aa287363-9dcc-402b-ba8f-032187456d2c\") " pod="openstack-operators/keystone-operator-index-4pj9l"
Nov 21 13:56:25 crc kubenswrapper[4721]: I1121 13:56:25.022574 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-4pj9l"
Nov 21 13:56:25 crc kubenswrapper[4721]: I1121 13:56:25.265652 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-4pj9l"]
Nov 21 13:56:25 crc kubenswrapper[4721]: W1121 13:56:25.266942 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaa287363_9dcc_402b_ba8f_032187456d2c.slice/crio-47697d5e80646afcb2a389095ce81570d681306e760f0e6d673e11d94553955f WatchSource:0}: Error finding container 47697d5e80646afcb2a389095ce81570d681306e760f0e6d673e11d94553955f: Status 404 returned error can't find the container with id 47697d5e80646afcb2a389095ce81570d681306e760f0e6d673e11d94553955f
Nov 21 13:56:25 crc kubenswrapper[4721]: I1121 13:56:25.766687 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-4pj9l" event={"ID":"aa287363-9dcc-402b-ba8f-032187456d2c","Type":"ContainerStarted","Data":"47697d5e80646afcb2a389095ce81570d681306e760f0e6d673e11d94553955f"}
Nov 21 13:56:26 crc kubenswrapper[4721]: I1121 13:56:26.776642 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-4pj9l" event={"ID":"aa287363-9dcc-402b-ba8f-032187456d2c","Type":"ContainerStarted","Data":"373b81bfad2e15b42ab547408fd335add72918c344894d465df336775c2273d0"}
Nov 21 13:56:26 crc kubenswrapper[4721]: I1121 13:56:26.803178 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-index-4pj9l" podStartSLOduration=1.9417732920000002 podStartE2EDuration="2.803153693s" podCreationTimestamp="2025-11-21 13:56:24 +0000 UTC" firstStartedPulling="2025-11-21 13:56:25.271038639 +0000 UTC m=+1223.248929907" lastFinishedPulling="2025-11-21 13:56:26.13241906 +0000 UTC m=+1224.110310308" observedRunningTime="2025-11-21 13:56:26.79666969 +0000 UTC m=+1224.774560968" watchObservedRunningTime="2025-11-21 13:56:26.803153693 +0000 UTC m=+1224.781044991"
Nov 21 13:56:35 crc kubenswrapper[4721]: I1121 13:56:35.022974 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-index-4pj9l"
Nov 21 13:56:35 crc kubenswrapper[4721]: I1121 13:56:35.023542 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/keystone-operator-index-4pj9l"
Nov 21 13:56:35 crc kubenswrapper[4721]: I1121 13:56:35.066936 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/keystone-operator-index-4pj9l"
Nov 21 13:56:35 crc kubenswrapper[4721]: I1121 13:56:35.868622 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-index-4pj9l"
Nov 21 13:56:39 crc kubenswrapper[4721]: I1121 13:56:39.719548 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc"]
Nov 21 13:56:39 crc kubenswrapper[4721]: I1121 13:56:39.723315 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc"
Nov 21 13:56:39 crc kubenswrapper[4721]: I1121 13:56:39.726913 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-tw86c"
Nov 21 13:56:39 crc kubenswrapper[4721]: I1121 13:56:39.735090 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc"]
Nov 21 13:56:39 crc kubenswrapper[4721]: I1121 13:56:39.909376 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/66f9acf0-355c-4cf5-8e6a-b74cad5985f9-util\") pod \"e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc\" (UID: \"66f9acf0-355c-4cf5-8e6a-b74cad5985f9\") " pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc"
Nov 21 13:56:39 crc kubenswrapper[4721]: I1121 13:56:39.909452 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bbcf\" (UniqueName: \"kubernetes.io/projected/66f9acf0-355c-4cf5-8e6a-b74cad5985f9-kube-api-access-5bbcf\") pod \"e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc\" (UID: \"66f9acf0-355c-4cf5-8e6a-b74cad5985f9\") " pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc"
Nov 21 13:56:39 crc kubenswrapper[4721]: I1121 13:56:39.909978 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/66f9acf0-355c-4cf5-8e6a-b74cad5985f9-bundle\") pod \"e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc\" (UID: \"66f9acf0-355c-4cf5-8e6a-b74cad5985f9\") " pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc"
Nov 21 13:56:40 crc kubenswrapper[4721]: I1121 13:56:40.011703 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/66f9acf0-355c-4cf5-8e6a-b74cad5985f9-bundle\") pod \"e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc\" (UID: \"66f9acf0-355c-4cf5-8e6a-b74cad5985f9\") " pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc"
Nov 21 13:56:40 crc kubenswrapper[4721]: I1121 13:56:40.011857 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/66f9acf0-355c-4cf5-8e6a-b74cad5985f9-util\") pod \"e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc\" (UID: \"66f9acf0-355c-4cf5-8e6a-b74cad5985f9\") " pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc"
Nov 21 13:56:40 crc kubenswrapper[4721]: I1121 13:56:40.011918 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bbcf\" (UniqueName: \"kubernetes.io/projected/66f9acf0-355c-4cf5-8e6a-b74cad5985f9-kube-api-access-5bbcf\") pod \"e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc\" (UID: \"66f9acf0-355c-4cf5-8e6a-b74cad5985f9\") " pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc"
Nov 21 13:56:40 crc kubenswrapper[4721]: I1121 13:56:40.012298 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/66f9acf0-355c-4cf5-8e6a-b74cad5985f9-bundle\") pod \"e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc\" (UID: \"66f9acf0-355c-4cf5-8e6a-b74cad5985f9\") " pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc"
Nov 21 13:56:40 crc kubenswrapper[4721]: I1121 13:56:40.013237 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/66f9acf0-355c-4cf5-8e6a-b74cad5985f9-util\") pod \"e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc\" (UID: \"66f9acf0-355c-4cf5-8e6a-b74cad5985f9\") " pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc"
Nov 21 13:56:40 crc kubenswrapper[4721]: I1121 13:56:40.032059 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bbcf\" (UniqueName: \"kubernetes.io/projected/66f9acf0-355c-4cf5-8e6a-b74cad5985f9-kube-api-access-5bbcf\") pod \"e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc\" (UID: \"66f9acf0-355c-4cf5-8e6a-b74cad5985f9\") " pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc"
Nov 21 13:56:40 crc kubenswrapper[4721]: I1121 13:56:40.041691 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc"
Nov 21 13:56:40 crc kubenswrapper[4721]: I1121 13:56:40.475022 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc"]
Nov 21 13:56:40 crc kubenswrapper[4721]: W1121 13:56:40.481626 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod66f9acf0_355c_4cf5_8e6a_b74cad5985f9.slice/crio-c19e65d306e8a0797c0741dbe17de5b6026224205e0ac17cb0815a09ed4030b3 WatchSource:0}: Error finding container c19e65d306e8a0797c0741dbe17de5b6026224205e0ac17cb0815a09ed4030b3: Status 404 returned error can't find the container with id c19e65d306e8a0797c0741dbe17de5b6026224205e0ac17cb0815a09ed4030b3
Nov 21 13:56:40 crc kubenswrapper[4721]: I1121 13:56:40.875165 4721 generic.go:334] "Generic (PLEG): container finished" podID="66f9acf0-355c-4cf5-8e6a-b74cad5985f9" containerID="3f8d527bdff891af89b60527e00afd0ee33adf13f22de1d1522d480a5d9443e5" exitCode=0
Nov 21 13:56:40 crc kubenswrapper[4721]: I1121 13:56:40.875223 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc" event={"ID":"66f9acf0-355c-4cf5-8e6a-b74cad5985f9","Type":"ContainerDied","Data":"3f8d527bdff891af89b60527e00afd0ee33adf13f22de1d1522d480a5d9443e5"}
Nov 21 13:56:40 crc kubenswrapper[4721]: I1121 13:56:40.875466 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc" event={"ID":"66f9acf0-355c-4cf5-8e6a-b74cad5985f9","Type":"ContainerStarted","Data":"c19e65d306e8a0797c0741dbe17de5b6026224205e0ac17cb0815a09ed4030b3"}
Nov 21 13:56:42 crc kubenswrapper[4721]: I1121 13:56:42.887505 4721 generic.go:334] "Generic (PLEG): container finished" podID="66f9acf0-355c-4cf5-8e6a-b74cad5985f9" containerID="8e75e6f40402f4facc69581320644ea577d33511cc50e5b2aadebc4fe48e35fc" exitCode=0
Nov 21 13:56:42 crc kubenswrapper[4721]: I1121 13:56:42.887604 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc" event={"ID":"66f9acf0-355c-4cf5-8e6a-b74cad5985f9","Type":"ContainerDied","Data":"8e75e6f40402f4facc69581320644ea577d33511cc50e5b2aadebc4fe48e35fc"}
Nov 21 13:56:43 crc kubenswrapper[4721]: I1121 13:56:43.908234 4721 generic.go:334] "Generic (PLEG): container finished" podID="66f9acf0-355c-4cf5-8e6a-b74cad5985f9" containerID="b710c5807d82dce4628dd53d71257cbe8418b708b119042f17bc601c987e6ed3" exitCode=0
Nov 21 13:56:43 crc kubenswrapper[4721]: I1121 13:56:43.908320 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc" event={"ID":"66f9acf0-355c-4cf5-8e6a-b74cad5985f9","Type":"ContainerDied","Data":"b710c5807d82dce4628dd53d71257cbe8418b708b119042f17bc601c987e6ed3"}
Nov 21 13:56:45 crc kubenswrapper[4721]: I1121 13:56:45.191098 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc"
Nov 21 13:56:45 crc kubenswrapper[4721]: I1121 13:56:45.296558 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/66f9acf0-355c-4cf5-8e6a-b74cad5985f9-util\") pod \"66f9acf0-355c-4cf5-8e6a-b74cad5985f9\" (UID: \"66f9acf0-355c-4cf5-8e6a-b74cad5985f9\") "
Nov 21 13:56:45 crc kubenswrapper[4721]: I1121 13:56:45.296757 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/66f9acf0-355c-4cf5-8e6a-b74cad5985f9-bundle\") pod \"66f9acf0-355c-4cf5-8e6a-b74cad5985f9\" (UID: \"66f9acf0-355c-4cf5-8e6a-b74cad5985f9\") "
Nov 21 13:56:45 crc kubenswrapper[4721]: I1121 13:56:45.296796 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5bbcf\" (UniqueName: \"kubernetes.io/projected/66f9acf0-355c-4cf5-8e6a-b74cad5985f9-kube-api-access-5bbcf\") pod \"66f9acf0-355c-4cf5-8e6a-b74cad5985f9\" (UID: \"66f9acf0-355c-4cf5-8e6a-b74cad5985f9\") "
Nov 21 13:56:45 crc kubenswrapper[4721]: I1121 13:56:45.297411 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66f9acf0-355c-4cf5-8e6a-b74cad5985f9-bundle" (OuterVolumeSpecName: "bundle") pod "66f9acf0-355c-4cf5-8e6a-b74cad5985f9" (UID: "66f9acf0-355c-4cf5-8e6a-b74cad5985f9"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 13:56:45 crc kubenswrapper[4721]: I1121 13:56:45.302475 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66f9acf0-355c-4cf5-8e6a-b74cad5985f9-kube-api-access-5bbcf" (OuterVolumeSpecName: "kube-api-access-5bbcf") pod "66f9acf0-355c-4cf5-8e6a-b74cad5985f9" (UID: "66f9acf0-355c-4cf5-8e6a-b74cad5985f9"). InnerVolumeSpecName "kube-api-access-5bbcf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 13:56:45 crc kubenswrapper[4721]: I1121 13:56:45.313428 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66f9acf0-355c-4cf5-8e6a-b74cad5985f9-util" (OuterVolumeSpecName: "util") pod "66f9acf0-355c-4cf5-8e6a-b74cad5985f9" (UID: "66f9acf0-355c-4cf5-8e6a-b74cad5985f9"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 13:56:45 crc kubenswrapper[4721]: I1121 13:56:45.398259 4721 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/66f9acf0-355c-4cf5-8e6a-b74cad5985f9-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 13:56:45 crc kubenswrapper[4721]: I1121 13:56:45.398295 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5bbcf\" (UniqueName: \"kubernetes.io/projected/66f9acf0-355c-4cf5-8e6a-b74cad5985f9-kube-api-access-5bbcf\") on node \"crc\" DevicePath \"\""
Nov 21 13:56:45 crc kubenswrapper[4721]: I1121 13:56:45.398306 4721 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/66f9acf0-355c-4cf5-8e6a-b74cad5985f9-util\") on node \"crc\" DevicePath \"\""
Nov 21 13:56:45 crc kubenswrapper[4721]: I1121 13:56:45.928703 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc" event={"ID":"66f9acf0-355c-4cf5-8e6a-b74cad5985f9","Type":"ContainerDied","Data":"c19e65d306e8a0797c0741dbe17de5b6026224205e0ac17cb0815a09ed4030b3"}
Nov 21 13:56:45 crc kubenswrapper[4721]: I1121 13:56:45.928778 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c19e65d306e8a0797c0741dbe17de5b6026224205e0ac17cb0815a09ed4030b3"
Nov 21 13:56:45 crc kubenswrapper[4721]: I1121 13:56:45.928809 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc"
Nov 21 13:56:55 crc kubenswrapper[4721]: I1121 13:56:55.001606 4721 generic.go:334] "Generic (PLEG): container finished" podID="73ce16fc-de75-4ab2-a732-36a9b1c14c3a" containerID="a345a8dffec912f56a0cb85737202e8bd53f8b17d9f8ae41f83f10798de6918d" exitCode=0
Nov 21 13:56:55 crc kubenswrapper[4721]: I1121 13:56:55.001680 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/rabbitmq-server-0" event={"ID":"73ce16fc-de75-4ab2-a732-36a9b1c14c3a","Type":"ContainerDied","Data":"a345a8dffec912f56a0cb85737202e8bd53f8b17d9f8ae41f83f10798de6918d"}
Nov 21 13:56:57 crc kubenswrapper[4721]: I1121 13:56:57.014698 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/rabbitmq-server-0" event={"ID":"73ce16fc-de75-4ab2-a732-36a9b1c14c3a","Type":"ContainerStarted","Data":"069997bb74ccb3c3bf910fbf4c81918c575f32e9fadf53e3df6db4c3e1af9040"}
Nov 21 13:56:57 crc kubenswrapper[4721]: I1121 13:56:57.015488 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:56:57 crc kubenswrapper[4721]: I1121 13:56:57.041223 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/rabbitmq-server-0" podStartSLOduration=37.856210596 podStartE2EDuration="46.041205703s" podCreationTimestamp="2025-11-21 13:56:11 +0000 UTC" firstStartedPulling="2025-11-21 13:56:12.932425831 +0000 UTC m=+1210.910317089" lastFinishedPulling="2025-11-21 13:56:21.117420898 +0000 UTC m=+1219.095312196" observedRunningTime="2025-11-21 13:56:57.038661334 +0000 UTC m=+1255.016552592" watchObservedRunningTime="2025-11-21 13:56:57.041205703 +0000 UTC m=+1255.019096961"
Nov 21 13:56:58 crc kubenswrapper[4721]: I1121 13:56:58.937839 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6c95b69c96-p8ktm"]
Nov 21 13:56:58 crc kubenswrapper[4721]: E1121 13:56:58.938473 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66f9acf0-355c-4cf5-8e6a-b74cad5985f9" containerName="util"
Nov 21 13:56:58 crc kubenswrapper[4721]: I1121 13:56:58.938492 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="66f9acf0-355c-4cf5-8e6a-b74cad5985f9" containerName="util"
Nov 21 13:56:58 crc kubenswrapper[4721]: E1121 13:56:58.938512 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66f9acf0-355c-4cf5-8e6a-b74cad5985f9" containerName="extract"
Nov 21 13:56:58 crc kubenswrapper[4721]: I1121 13:56:58.938522 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="66f9acf0-355c-4cf5-8e6a-b74cad5985f9" containerName="extract"
Nov 21 13:56:58 crc kubenswrapper[4721]: E1121 13:56:58.938540 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66f9acf0-355c-4cf5-8e6a-b74cad5985f9" containerName="pull"
Nov 21 13:56:58 crc kubenswrapper[4721]: I1121 13:56:58.938548 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="66f9acf0-355c-4cf5-8e6a-b74cad5985f9" containerName="pull"
Nov 21 13:56:58 crc kubenswrapper[4721]: I1121 13:56:58.938671 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="66f9acf0-355c-4cf5-8e6a-b74cad5985f9" containerName="extract"
Nov 21 13:56:58 crc kubenswrapper[4721]: I1121 13:56:58.939440 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-6c95b69c96-p8ktm"
Nov 21 13:56:58 crc kubenswrapper[4721]: I1121 13:56:58.941851 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-service-cert"
Nov 21 13:56:58 crc kubenswrapper[4721]: I1121 13:56:58.944110 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-hswk7"
Nov 21 13:56:58 crc kubenswrapper[4721]: I1121 13:56:58.948218 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6c95b69c96-p8ktm"]
Nov 21 13:56:59 crc kubenswrapper[4721]: I1121 13:56:59.088431 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/23f820aa-61b3-41c7-b322-2c99c8293ff2-webhook-cert\") pod \"keystone-operator-controller-manager-6c95b69c96-p8ktm\" (UID: \"23f820aa-61b3-41c7-b322-2c99c8293ff2\") " pod="openstack-operators/keystone-operator-controller-manager-6c95b69c96-p8ktm"
Nov 21 13:56:59 crc kubenswrapper[4721]: I1121 13:56:59.088491 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/23f820aa-61b3-41c7-b322-2c99c8293ff2-apiservice-cert\") pod \"keystone-operator-controller-manager-6c95b69c96-p8ktm\" (UID: \"23f820aa-61b3-41c7-b322-2c99c8293ff2\") " pod="openstack-operators/keystone-operator-controller-manager-6c95b69c96-p8ktm"
Nov 21 13:56:59 crc kubenswrapper[4721]: I1121 13:56:59.088521 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2vrn\" (UniqueName: \"kubernetes.io/projected/23f820aa-61b3-41c7-b322-2c99c8293ff2-kube-api-access-r2vrn\") pod \"keystone-operator-controller-manager-6c95b69c96-p8ktm\" (UID: \"23f820aa-61b3-41c7-b322-2c99c8293ff2\") " pod="openstack-operators/keystone-operator-controller-manager-6c95b69c96-p8ktm"
Nov 21 13:56:59 crc kubenswrapper[4721]: I1121 13:56:59.190252 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/23f820aa-61b3-41c7-b322-2c99c8293ff2-apiservice-cert\") pod \"keystone-operator-controller-manager-6c95b69c96-p8ktm\" (UID: \"23f820aa-61b3-41c7-b322-2c99c8293ff2\") " pod="openstack-operators/keystone-operator-controller-manager-6c95b69c96-p8ktm"
Nov 21 13:56:59 crc kubenswrapper[4721]: I1121 13:56:59.190298 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2vrn\" (UniqueName: \"kubernetes.io/projected/23f820aa-61b3-41c7-b322-2c99c8293ff2-kube-api-access-r2vrn\") pod \"keystone-operator-controller-manager-6c95b69c96-p8ktm\" (UID: \"23f820aa-61b3-41c7-b322-2c99c8293ff2\") " pod="openstack-operators/keystone-operator-controller-manager-6c95b69c96-p8ktm"
Nov 21 13:56:59 crc kubenswrapper[4721]: I1121 13:56:59.190431 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/23f820aa-61b3-41c7-b322-2c99c8293ff2-webhook-cert\") pod \"keystone-operator-controller-manager-6c95b69c96-p8ktm\" (UID: \"23f820aa-61b3-41c7-b322-2c99c8293ff2\") " pod="openstack-operators/keystone-operator-controller-manager-6c95b69c96-p8ktm"
Nov 21 13:56:59 crc kubenswrapper[4721]: I1121 13:56:59.196692 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/23f820aa-61b3-41c7-b322-2c99c8293ff2-apiservice-cert\") pod \"keystone-operator-controller-manager-6c95b69c96-p8ktm\" (UID: \"23f820aa-61b3-41c7-b322-2c99c8293ff2\") " pod="openstack-operators/keystone-operator-controller-manager-6c95b69c96-p8ktm"
Nov 21 13:56:59 crc kubenswrapper[4721]: I1121 13:56:59.196753 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/23f820aa-61b3-41c7-b322-2c99c8293ff2-webhook-cert\") pod \"keystone-operator-controller-manager-6c95b69c96-p8ktm\" (UID: \"23f820aa-61b3-41c7-b322-2c99c8293ff2\") " pod="openstack-operators/keystone-operator-controller-manager-6c95b69c96-p8ktm"
Nov 21 13:56:59 crc kubenswrapper[4721]: I1121 13:56:59.207101 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2vrn\" (UniqueName: \"kubernetes.io/projected/23f820aa-61b3-41c7-b322-2c99c8293ff2-kube-api-access-r2vrn\") pod \"keystone-operator-controller-manager-6c95b69c96-p8ktm\" (UID: \"23f820aa-61b3-41c7-b322-2c99c8293ff2\") " pod="openstack-operators/keystone-operator-controller-manager-6c95b69c96-p8ktm"
Nov 21 13:56:59 crc kubenswrapper[4721]: I1121 13:56:59.258541 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-6c95b69c96-p8ktm"
Nov 21 13:56:59 crc kubenswrapper[4721]: I1121 13:56:59.478322 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6c95b69c96-p8ktm"]
Nov 21 13:56:59 crc kubenswrapper[4721]: W1121 13:56:59.482170 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod23f820aa_61b3_41c7_b322_2c99c8293ff2.slice/crio-997244d8afdd7b32371e4c3e1e8cfbbd009c803e0044f39559f5f5690320b4e7 WatchSource:0}: Error finding container 997244d8afdd7b32371e4c3e1e8cfbbd009c803e0044f39559f5f5690320b4e7: Status 404 returned error can't find the container with id 997244d8afdd7b32371e4c3e1e8cfbbd009c803e0044f39559f5f5690320b4e7
Nov 21 13:57:00 crc kubenswrapper[4721]: I1121 13:57:00.032500 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-6c95b69c96-p8ktm" event={"ID":"23f820aa-61b3-41c7-b322-2c99c8293ff2","Type":"ContainerStarted","Data":"997244d8afdd7b32371e4c3e1e8cfbbd009c803e0044f39559f5f5690320b4e7"}
Nov 21 13:57:05 crc kubenswrapper[4721]: I1121 13:57:05.068498 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-6c95b69c96-p8ktm" event={"ID":"23f820aa-61b3-41c7-b322-2c99c8293ff2","Type":"ContainerStarted","Data":"ca14f9600c97a04dc67333fd3c14a6bdcc731e906564dcff54a7f95472d7c18e"}
Nov 21 13:57:05 crc kubenswrapper[4721]: I1121 13:57:05.069124 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-6c95b69c96-p8ktm"
Nov 21 13:57:05 crc kubenswrapper[4721]: I1121 13:57:05.069140 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-6c95b69c96-p8ktm" event={"ID":"23f820aa-61b3-41c7-b322-2c99c8293ff2","Type":"ContainerStarted","Data":"7d79b37769b26f79bef9665589863c5453f1dea92067ec38cec7b17ef6442c31"}
Nov 21 13:57:05 crc kubenswrapper[4721]: I1121 13:57:05.086693 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-6c95b69c96-p8ktm" podStartSLOduration=2.073697275 podStartE2EDuration="7.086675852s" podCreationTimestamp="2025-11-21 13:56:58 +0000 UTC" firstStartedPulling="2025-11-21 13:56:59.484669322 +0000 UTC m=+1257.462560580" lastFinishedPulling="2025-11-21 13:57:04.497647899 +0000 UTC m=+1262.475539157" observedRunningTime="2025-11-21 13:57:05.08622764 +0000 UTC m=+1263.064118898" watchObservedRunningTime="2025-11-21 13:57:05.086675852 +0000 UTC m=+1263.064567110"
Nov 21 13:57:09 crc kubenswrapper[4721]: I1121 13:57:09.262150 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-6c95b69c96-p8ktm"
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.207669 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/keystone-db-create-vvl59"]
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.208846 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-db-create-vvl59"
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.213114 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/keystone-4959-account-create-update-tskfp"]
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.214112 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-4959-account-create-update-tskfp"
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.215980 4721 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-db-secret"
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.226335 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-4959-account-create-update-tskfp"]
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.229643 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-db-create-vvl59"]
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.392911 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cbz5\" (UniqueName: \"kubernetes.io/projected/2e1cff88-741d-4968-a2ec-677854eebe6b-kube-api-access-9cbz5\") pod \"keystone-4959-account-create-update-tskfp\" (UID: \"2e1cff88-741d-4968-a2ec-677854eebe6b\") " pod="horizon-kuttl-tests/keystone-4959-account-create-update-tskfp"
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.392979 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/228ed5a3-1cc5-4176-a779-16251358296a-operator-scripts\") pod \"keystone-db-create-vvl59\" (UID: \"228ed5a3-1cc5-4176-a779-16251358296a\") " pod="horizon-kuttl-tests/keystone-db-create-vvl59"
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.393008 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6h46\" (UniqueName: \"kubernetes.io/projected/228ed5a3-1cc5-4176-a779-16251358296a-kube-api-access-v6h46\") pod \"keystone-db-create-vvl59\" (UID: \"228ed5a3-1cc5-4176-a779-16251358296a\") " pod="horizon-kuttl-tests/keystone-db-create-vvl59"
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.393140 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2e1cff88-741d-4968-a2ec-677854eebe6b-operator-scripts\") pod \"keystone-4959-account-create-update-tskfp\" (UID: \"2e1cff88-741d-4968-a2ec-677854eebe6b\") " pod="horizon-kuttl-tests/keystone-4959-account-create-update-tskfp"
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.495321 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2e1cff88-741d-4968-a2ec-677854eebe6b-operator-scripts\") pod \"keystone-4959-account-create-update-tskfp\" (UID: \"2e1cff88-741d-4968-a2ec-677854eebe6b\") " pod="horizon-kuttl-tests/keystone-4959-account-create-update-tskfp"
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.495434 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cbz5\" (UniqueName: \"kubernetes.io/projected/2e1cff88-741d-4968-a2ec-677854eebe6b-kube-api-access-9cbz5\") pod \"keystone-4959-account-create-update-tskfp\" (UID: \"2e1cff88-741d-4968-a2ec-677854eebe6b\") " pod="horizon-kuttl-tests/keystone-4959-account-create-update-tskfp"
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.495458 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/228ed5a3-1cc5-4176-a779-16251358296a-operator-scripts\") pod \"keystone-db-create-vvl59\" (UID: \"228ed5a3-1cc5-4176-a779-16251358296a\") " pod="horizon-kuttl-tests/keystone-db-create-vvl59"
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.495487 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6h46\" (UniqueName: \"kubernetes.io/projected/228ed5a3-1cc5-4176-a779-16251358296a-kube-api-access-v6h46\") pod \"keystone-db-create-vvl59\" (UID: \"228ed5a3-1cc5-4176-a779-16251358296a\") " pod="horizon-kuttl-tests/keystone-db-create-vvl59"
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.496616 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2e1cff88-741d-4968-a2ec-677854eebe6b-operator-scripts\") pod \"keystone-4959-account-create-update-tskfp\" (UID: \"2e1cff88-741d-4968-a2ec-677854eebe6b\") " pod="horizon-kuttl-tests/keystone-4959-account-create-update-tskfp"
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.497360 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/228ed5a3-1cc5-4176-a779-16251358296a-operator-scripts\") pod \"keystone-db-create-vvl59\" (UID: \"228ed5a3-1cc5-4176-a779-16251358296a\") " pod="horizon-kuttl-tests/keystone-db-create-vvl59"
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.527770 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6h46\" (UniqueName: \"kubernetes.io/projected/228ed5a3-1cc5-4176-a779-16251358296a-kube-api-access-v6h46\") pod \"keystone-db-create-vvl59\" (UID: \"228ed5a3-1cc5-4176-a779-16251358296a\") " pod="horizon-kuttl-tests/keystone-db-create-vvl59"
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.527960 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cbz5\" (UniqueName: \"kubernetes.io/projected/2e1cff88-741d-4968-a2ec-677854eebe6b-kube-api-access-9cbz5\") pod \"keystone-4959-account-create-update-tskfp\" (UID: \"2e1cff88-741d-4968-a2ec-677854eebe6b\") " pod="horizon-kuttl-tests/keystone-4959-account-create-update-tskfp"
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.529755 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-db-create-vvl59"
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.550755 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-4959-account-create-update-tskfp"
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.671565 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/rabbitmq-server-0"
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.938693 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-4959-account-create-update-tskfp"]
Nov 21 13:57:12 crc kubenswrapper[4721]: I1121 13:57:12.997398 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-db-create-vvl59"]
Nov 21 13:57:13 crc kubenswrapper[4721]: I1121 13:57:13.119470 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-4959-account-create-update-tskfp" event={"ID":"2e1cff88-741d-4968-a2ec-677854eebe6b","Type":"ContainerStarted","Data":"14fdb20d376c4a31e6880de0b1b3861891f1e2590ec45d692d66f75245edbd1c"}
Nov 21 13:57:13 crc kubenswrapper[4721]: I1121 13:57:13.120520 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-create-vvl59" event={"ID":"228ed5a3-1cc5-4176-a779-16251358296a","Type":"ContainerStarted","Data":"3526293e979c51f5a214cb5dee79849ad3fb8adb35a0cd689f8f3729dbdc1b9f"}
Nov 21 13:57:14 crc kubenswrapper[4721]: I1121 13:57:14.128775 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-create-vvl59" event={"ID":"228ed5a3-1cc5-4176-a779-16251358296a","Type":"ContainerStarted","Data":"88570b89bd09425c5f62de86b37b025d4a46add5e83abc257872078a4b851e65"}
Nov 21 13:57:14 crc kubenswrapper[4721]: I1121 13:57:14.130835 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-4959-account-create-update-tskfp" event={"ID":"2e1cff88-741d-4968-a2ec-677854eebe6b","Type":"ContainerStarted","Data":"dc6fce84ba40381d3aac7a3015ae4bc8edcdc6f2f0da2a5679b7cfcc1fa130a0"}
Nov 21 13:57:14 crc kubenswrapper[4721]: I1121 13:57:14.150294 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/keystone-db-create-vvl59" podStartSLOduration=2.150267573 podStartE2EDuration="2.150267573s" podCreationTimestamp="2025-11-21 13:57:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:57:14.146576643 +0000 UTC m=+1272.124467901" watchObservedRunningTime="2025-11-21 13:57:14.150267573 +0000 UTC m=+1272.128158831"
Nov 21 13:57:14 crc kubenswrapper[4721]: I1121 13:57:14.168373 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/keystone-4959-account-create-update-tskfp" podStartSLOduration=2.168356763 podStartE2EDuration="2.168356763s" podCreationTimestamp="2025-11-21 13:57:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:57:14.164362765 +0000 UTC m=+1272.142254043" watchObservedRunningTime="2025-11-21 13:57:14.168356763 +0000 UTC m=+1272.146248021"
Nov 21 13:57:15 crc kubenswrapper[4721]: I1121 13:57:15.090582 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-index-4nt92"]
Nov 21 13:57:15 crc kubenswrapper[4721]: I1121 13:57:15.093366 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-index-4nt92"
Nov 21 13:57:15 crc kubenswrapper[4721]: I1121 13:57:15.095869 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-index-dockercfg-nkmqz"
Nov 21 13:57:15 crc kubenswrapper[4721]: I1121 13:57:15.103634 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-index-4nt92"]
Nov 21 13:57:15 crc kubenswrapper[4721]: I1121 13:57:15.139216 4721 generic.go:334] "Generic (PLEG): container finished" podID="228ed5a3-1cc5-4176-a779-16251358296a" containerID="88570b89bd09425c5f62de86b37b025d4a46add5e83abc257872078a4b851e65" exitCode=0
Nov 21 13:57:15 crc kubenswrapper[4721]: I1121 13:57:15.139317 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-create-vvl59" event={"ID":"228ed5a3-1cc5-4176-a779-16251358296a","Type":"ContainerDied","Data":"88570b89bd09425c5f62de86b37b025d4a46add5e83abc257872078a4b851e65"}
Nov 21 13:57:15 crc kubenswrapper[4721]: I1121 13:57:15.141292 4721 generic.go:334] "Generic (PLEG): container finished" podID="2e1cff88-741d-4968-a2ec-677854eebe6b" containerID="dc6fce84ba40381d3aac7a3015ae4bc8edcdc6f2f0da2a5679b7cfcc1fa130a0" exitCode=0
Nov 21 13:57:15 crc kubenswrapper[4721]: I1121 13:57:15.141365 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-4959-account-create-update-tskfp" event={"ID":"2e1cff88-741d-4968-a2ec-677854eebe6b","Type":"ContainerDied","Data":"dc6fce84ba40381d3aac7a3015ae4bc8edcdc6f2f0da2a5679b7cfcc1fa130a0"}
Nov 21 13:57:15 crc kubenswrapper[4721]: I1121 13:57:15.238982 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qw78\" (UniqueName: \"kubernetes.io/projected/6b72df5a-86d9-4406-88b1-4059f7067179-kube-api-access-9qw78\") pod \"horizon-operator-index-4nt92\" (UID: \"6b72df5a-86d9-4406-88b1-4059f7067179\") " pod="openstack-operators/horizon-operator-index-4nt92"
Nov 21 13:57:15 crc kubenswrapper[4721]: I1121 13:57:15.340513 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qw78\" (UniqueName: \"kubernetes.io/projected/6b72df5a-86d9-4406-88b1-4059f7067179-kube-api-access-9qw78\") pod \"horizon-operator-index-4nt92\" (UID: \"6b72df5a-86d9-4406-88b1-4059f7067179\") " pod="openstack-operators/horizon-operator-index-4nt92"
Nov 21 13:57:15 crc kubenswrapper[4721]: I1121 13:57:15.361026 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qw78\" (UniqueName: \"kubernetes.io/projected/6b72df5a-86d9-4406-88b1-4059f7067179-kube-api-access-9qw78\") pod \"horizon-operator-index-4nt92\" (UID: \"6b72df5a-86d9-4406-88b1-4059f7067179\") " pod="openstack-operators/horizon-operator-index-4nt92"
Nov 21 13:57:15 crc kubenswrapper[4721]: I1121 13:57:15.411830 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-index-4nt92"
Nov 21 13:57:15 crc kubenswrapper[4721]: I1121 13:57:15.845167 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-index-4nt92"]
Nov 21 13:57:16 crc kubenswrapper[4721]: I1121 13:57:16.154376 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-4nt92" event={"ID":"6b72df5a-86d9-4406-88b1-4059f7067179","Type":"ContainerStarted","Data":"50555fd208ed931fe19f60baa7189439ea48e3c9dbaa8971c00c8577e02a5a60"}
Nov 21 13:57:16 crc kubenswrapper[4721]: I1121 13:57:16.548812 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-4959-account-create-update-tskfp"
Nov 21 13:57:16 crc kubenswrapper[4721]: I1121 13:57:16.556285 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-db-create-vvl59"
Nov 21 13:57:16 crc kubenswrapper[4721]: I1121 13:57:16.658527 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2e1cff88-741d-4968-a2ec-677854eebe6b-operator-scripts\") pod \"2e1cff88-741d-4968-a2ec-677854eebe6b\" (UID: \"2e1cff88-741d-4968-a2ec-677854eebe6b\") "
Nov 21 13:57:16 crc kubenswrapper[4721]: I1121 13:57:16.658614 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/228ed5a3-1cc5-4176-a779-16251358296a-operator-scripts\") pod \"228ed5a3-1cc5-4176-a779-16251358296a\" (UID: \"228ed5a3-1cc5-4176-a779-16251358296a\") "
Nov 21 13:57:16 crc kubenswrapper[4721]: I1121 13:57:16.658658 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6h46\" (UniqueName: \"kubernetes.io/projected/228ed5a3-1cc5-4176-a779-16251358296a-kube-api-access-v6h46\") pod \"228ed5a3-1cc5-4176-a779-16251358296a\" (UID: \"228ed5a3-1cc5-4176-a779-16251358296a\") "
Nov 21 13:57:16 crc kubenswrapper[4721]: I1121 13:57:16.658722 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9cbz5\" (UniqueName: \"kubernetes.io/projected/2e1cff88-741d-4968-a2ec-677854eebe6b-kube-api-access-9cbz5\") pod \"2e1cff88-741d-4968-a2ec-677854eebe6b\" (UID: \"2e1cff88-741d-4968-a2ec-677854eebe6b\") "
Nov 21 13:57:16 crc kubenswrapper[4721]: I1121 13:57:16.659356 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/228ed5a3-1cc5-4176-a779-16251358296a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "228ed5a3-1cc5-4176-a779-16251358296a" (UID: "228ed5a3-1cc5-4176-a779-16251358296a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 13:57:16 crc kubenswrapper[4721]: I1121 13:57:16.659382 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e1cff88-741d-4968-a2ec-677854eebe6b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2e1cff88-741d-4968-a2ec-677854eebe6b" (UID: "2e1cff88-741d-4968-a2ec-677854eebe6b"). InnerVolumeSpecName "operator-scripts".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 13:57:16 crc kubenswrapper[4721]: I1121 13:57:16.663887 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/228ed5a3-1cc5-4176-a779-16251358296a-kube-api-access-v6h46" (OuterVolumeSpecName: "kube-api-access-v6h46") pod "228ed5a3-1cc5-4176-a779-16251358296a" (UID: "228ed5a3-1cc5-4176-a779-16251358296a"). InnerVolumeSpecName "kube-api-access-v6h46". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:57:16 crc kubenswrapper[4721]: I1121 13:57:16.664165 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e1cff88-741d-4968-a2ec-677854eebe6b-kube-api-access-9cbz5" (OuterVolumeSpecName: "kube-api-access-9cbz5") pod "2e1cff88-741d-4968-a2ec-677854eebe6b" (UID: "2e1cff88-741d-4968-a2ec-677854eebe6b"). InnerVolumeSpecName "kube-api-access-9cbz5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:57:16 crc kubenswrapper[4721]: I1121 13:57:16.761014 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9cbz5\" (UniqueName: \"kubernetes.io/projected/2e1cff88-741d-4968-a2ec-677854eebe6b-kube-api-access-9cbz5\") on node \"crc\" DevicePath \"\"" Nov 21 13:57:16 crc kubenswrapper[4721]: I1121 13:57:16.761120 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2e1cff88-741d-4968-a2ec-677854eebe6b-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 13:57:16 crc kubenswrapper[4721]: I1121 13:57:16.761150 4721 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/228ed5a3-1cc5-4176-a779-16251358296a-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 13:57:16 crc kubenswrapper[4721]: I1121 13:57:16.761181 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6h46\" (UniqueName: \"kubernetes.io/projected/228ed5a3-1cc5-4176-a779-16251358296a-kube-api-access-v6h46\") on node \"crc\" DevicePath \"\"" Nov 21 13:57:17 crc kubenswrapper[4721]: I1121 13:57:17.163830 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-create-vvl59" event={"ID":"228ed5a3-1cc5-4176-a779-16251358296a","Type":"ContainerDied","Data":"3526293e979c51f5a214cb5dee79849ad3fb8adb35a0cd689f8f3729dbdc1b9f"} Nov 21 13:57:17 crc kubenswrapper[4721]: I1121 13:57:17.163878 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3526293e979c51f5a214cb5dee79849ad3fb8adb35a0cd689f8f3729dbdc1b9f" Nov 21 13:57:17 crc kubenswrapper[4721]: I1121 13:57:17.163844 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-db-create-vvl59" Nov 21 13:57:17 crc kubenswrapper[4721]: I1121 13:57:17.165264 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-4959-account-create-update-tskfp" event={"ID":"2e1cff88-741d-4968-a2ec-677854eebe6b","Type":"ContainerDied","Data":"14fdb20d376c4a31e6880de0b1b3861891f1e2590ec45d692d66f75245edbd1c"} Nov 21 13:57:17 crc kubenswrapper[4721]: I1121 13:57:17.165331 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14fdb20d376c4a31e6880de0b1b3861891f1e2590ec45d692d66f75245edbd1c" Nov 21 13:57:17 crc kubenswrapper[4721]: I1121 13:57:17.165281 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-4959-account-create-update-tskfp" Nov 21 13:57:22 crc kubenswrapper[4721]: I1121 13:57:22.799456 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/keystone-db-sync-jpwfj"] Nov 21 13:57:22 crc kubenswrapper[4721]: E1121 13:57:22.800278 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e1cff88-741d-4968-a2ec-677854eebe6b" containerName="mariadb-account-create-update" Nov 21 13:57:22 crc kubenswrapper[4721]: I1121 13:57:22.800307 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e1cff88-741d-4968-a2ec-677854eebe6b" containerName="mariadb-account-create-update" Nov 21 13:57:22 crc kubenswrapper[4721]: E1121 13:57:22.800335 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="228ed5a3-1cc5-4176-a779-16251358296a" containerName="mariadb-database-create" Nov 21 13:57:22 crc kubenswrapper[4721]: I1121 13:57:22.800349 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="228ed5a3-1cc5-4176-a779-16251358296a" containerName="mariadb-database-create" Nov 21 13:57:22 crc kubenswrapper[4721]: I1121 13:57:22.800580 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e1cff88-741d-4968-a2ec-677854eebe6b" containerName="mariadb-account-create-update" Nov 21 13:57:22 crc kubenswrapper[4721]: I1121 13:57:22.800602 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="228ed5a3-1cc5-4176-a779-16251358296a" containerName="mariadb-database-create" Nov 21 13:57:22 crc kubenswrapper[4721]: I1121 13:57:22.801277 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-db-sync-jpwfj" Nov 21 13:57:22 crc kubenswrapper[4721]: I1121 13:57:22.803573 4721 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-keystone-dockercfg-kr8km" Nov 21 13:57:22 crc kubenswrapper[4721]: I1121 13:57:22.803731 4721 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-config-data" Nov 21 13:57:22 crc kubenswrapper[4721]: I1121 13:57:22.804412 4721 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-scripts" Nov 21 13:57:22 crc kubenswrapper[4721]: I1121 13:57:22.805824 4721 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone" Nov 21 13:57:22 crc kubenswrapper[4721]: I1121 13:57:22.808788 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-db-sync-jpwfj"] Nov 21 13:57:22 crc kubenswrapper[4721]: I1121 13:57:22.955419 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc49d3fb-6876-4d26-8e8c-f63f3d680936-config-data\") pod \"keystone-db-sync-jpwfj\" (UID: \"cc49d3fb-6876-4d26-8e8c-f63f3d680936\") " pod="horizon-kuttl-tests/keystone-db-sync-jpwfj" Nov 21 13:57:22 crc kubenswrapper[4721]: I1121 13:57:22.955540 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98cg5\" (UniqueName: \"kubernetes.io/projected/cc49d3fb-6876-4d26-8e8c-f63f3d680936-kube-api-access-98cg5\") pod \"keystone-db-sync-jpwfj\" (UID: \"cc49d3fb-6876-4d26-8e8c-f63f3d680936\") " pod="horizon-kuttl-tests/keystone-db-sync-jpwfj" Nov 21 13:57:23 crc kubenswrapper[4721]: I1121 13:57:23.057167 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-98cg5\" (UniqueName: \"kubernetes.io/projected/cc49d3fb-6876-4d26-8e8c-f63f3d680936-kube-api-access-98cg5\") pod \"keystone-db-sync-jpwfj\" (UID: \"cc49d3fb-6876-4d26-8e8c-f63f3d680936\") " pod="horizon-kuttl-tests/keystone-db-sync-jpwfj" Nov 21 13:57:23 crc kubenswrapper[4721]: I1121 13:57:23.057322 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc49d3fb-6876-4d26-8e8c-f63f3d680936-config-data\") pod \"keystone-db-sync-jpwfj\" (UID: \"cc49d3fb-6876-4d26-8e8c-f63f3d680936\") " pod="horizon-kuttl-tests/keystone-db-sync-jpwfj" Nov 21 13:57:23 crc kubenswrapper[4721]: I1121 13:57:23.066140 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc49d3fb-6876-4d26-8e8c-f63f3d680936-config-data\") pod \"keystone-db-sync-jpwfj\" (UID: \"cc49d3fb-6876-4d26-8e8c-f63f3d680936\") " pod="horizon-kuttl-tests/keystone-db-sync-jpwfj" Nov 21 13:57:23 crc kubenswrapper[4721]: I1121 13:57:23.078951 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98cg5\" (UniqueName: \"kubernetes.io/projected/cc49d3fb-6876-4d26-8e8c-f63f3d680936-kube-api-access-98cg5\") pod \"keystone-db-sync-jpwfj\" (UID: \"cc49d3fb-6876-4d26-8e8c-f63f3d680936\") " pod="horizon-kuttl-tests/keystone-db-sync-jpwfj" Nov 21 13:57:23 crc kubenswrapper[4721]: I1121 13:57:23.129300 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-db-sync-jpwfj" Nov 21 13:57:23 crc kubenswrapper[4721]: I1121 13:57:23.203483 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-4nt92" event={"ID":"6b72df5a-86d9-4406-88b1-4059f7067179","Type":"ContainerStarted","Data":"45e41b7ca12cfdb0d2df9b4085bb2e8eb887c1ee26186c3faaae7df44269fc95"} Nov 21 13:57:23 crc kubenswrapper[4721]: I1121 13:57:23.567985 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-index-4nt92" podStartSLOduration=2.507303333 podStartE2EDuration="8.567964013s" podCreationTimestamp="2025-11-21 13:57:15 +0000 UTC" firstStartedPulling="2025-11-21 13:57:15.852052504 +0000 UTC m=+1273.829943762" lastFinishedPulling="2025-11-21 13:57:21.912713184 +0000 UTC m=+1279.890604442" observedRunningTime="2025-11-21 13:57:23.224801363 +0000 UTC m=+1281.202692651" watchObservedRunningTime="2025-11-21 13:57:23.567964013 +0000 UTC m=+1281.545855281" Nov 21 13:57:23 crc kubenswrapper[4721]: I1121 13:57:23.569258 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-db-sync-jpwfj"] Nov 21 13:57:23 crc kubenswrapper[4721]: W1121 13:57:23.578336 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc49d3fb_6876_4d26_8e8c_f63f3d680936.slice/crio-1f87e65f4a94499f1eea498edf20e6b654421dd2a274ebb5f5b26a3c1a26720f WatchSource:0}: Error finding container 1f87e65f4a94499f1eea498edf20e6b654421dd2a274ebb5f5b26a3c1a26720f: Status 404 returned error can't find the container with id 1f87e65f4a94499f1eea498edf20e6b654421dd2a274ebb5f5b26a3c1a26720f Nov 21 13:57:24 crc kubenswrapper[4721]: I1121 13:57:24.214568 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-sync-jpwfj" 
event={"ID":"cc49d3fb-6876-4d26-8e8c-f63f3d680936","Type":"ContainerStarted","Data":"1f87e65f4a94499f1eea498edf20e6b654421dd2a274ebb5f5b26a3c1a26720f"} Nov 21 13:57:25 crc kubenswrapper[4721]: I1121 13:57:25.412394 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/horizon-operator-index-4nt92" Nov 21 13:57:25 crc kubenswrapper[4721]: I1121 13:57:25.412435 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-index-4nt92" Nov 21 13:57:25 crc kubenswrapper[4721]: I1121 13:57:25.442596 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/horizon-operator-index-4nt92" Nov 21 13:57:32 crc kubenswrapper[4721]: I1121 13:57:32.268166 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-sync-jpwfj" event={"ID":"cc49d3fb-6876-4d26-8e8c-f63f3d680936","Type":"ContainerStarted","Data":"0b494a027580af91e0742234075190c9ff78bb6c712bcc5c32ba21601fad6716"} Nov 21 13:57:32 crc kubenswrapper[4721]: I1121 13:57:32.294752 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/keystone-db-sync-jpwfj" podStartSLOduration=2.553664416 podStartE2EDuration="10.294734666s" podCreationTimestamp="2025-11-21 13:57:22 +0000 UTC" firstStartedPulling="2025-11-21 13:57:23.579757582 +0000 UTC m=+1281.557648840" lastFinishedPulling="2025-11-21 13:57:31.320827822 +0000 UTC m=+1289.298719090" observedRunningTime="2025-11-21 13:57:32.290236914 +0000 UTC m=+1290.268128172" watchObservedRunningTime="2025-11-21 13:57:32.294734666 +0000 UTC m=+1290.272625924" Nov 21 13:57:35 crc kubenswrapper[4721]: I1121 13:57:35.440817 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-index-4nt92" Nov 21 13:57:38 crc kubenswrapper[4721]: I1121 13:57:38.314138 4721 generic.go:334] "Generic (PLEG): container finished" podID="cc49d3fb-6876-4d26-8e8c-f63f3d680936" containerID="0b494a027580af91e0742234075190c9ff78bb6c712bcc5c32ba21601fad6716" exitCode=0 Nov 21 13:57:38 crc kubenswrapper[4721]: I1121 13:57:38.314219 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-sync-jpwfj" event={"ID":"cc49d3fb-6876-4d26-8e8c-f63f3d680936","Type":"ContainerDied","Data":"0b494a027580af91e0742234075190c9ff78bb6c712bcc5c32ba21601fad6716"} Nov 21 13:57:39 crc kubenswrapper[4721]: I1121 13:57:39.640895 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-db-sync-jpwfj" Nov 21 13:57:39 crc kubenswrapper[4721]: I1121 13:57:39.704941 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc49d3fb-6876-4d26-8e8c-f63f3d680936-config-data\") pod \"cc49d3fb-6876-4d26-8e8c-f63f3d680936\" (UID: \"cc49d3fb-6876-4d26-8e8c-f63f3d680936\") " Nov 21 13:57:39 crc kubenswrapper[4721]: I1121 13:57:39.705025 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98cg5\" (UniqueName: \"kubernetes.io/projected/cc49d3fb-6876-4d26-8e8c-f63f3d680936-kube-api-access-98cg5\") pod \"cc49d3fb-6876-4d26-8e8c-f63f3d680936\" (UID: \"cc49d3fb-6876-4d26-8e8c-f63f3d680936\") " Nov 21 13:57:39 crc kubenswrapper[4721]: I1121 13:57:39.710002 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc49d3fb-6876-4d26-8e8c-f63f3d680936-kube-api-access-98cg5" (OuterVolumeSpecName: "kube-api-access-98cg5") pod "cc49d3fb-6876-4d26-8e8c-f63f3d680936" (UID: "cc49d3fb-6876-4d26-8e8c-f63f3d680936"). InnerVolumeSpecName "kube-api-access-98cg5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:57:39 crc kubenswrapper[4721]: I1121 13:57:39.734606 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc49d3fb-6876-4d26-8e8c-f63f3d680936-config-data" (OuterVolumeSpecName: "config-data") pod "cc49d3fb-6876-4d26-8e8c-f63f3d680936" (UID: "cc49d3fb-6876-4d26-8e8c-f63f3d680936"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:57:39 crc kubenswrapper[4721]: I1121 13:57:39.806752 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98cg5\" (UniqueName: \"kubernetes.io/projected/cc49d3fb-6876-4d26-8e8c-f63f3d680936-kube-api-access-98cg5\") on node \"crc\" DevicePath \"\"" Nov 21 13:57:39 crc kubenswrapper[4721]: I1121 13:57:39.806793 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc49d3fb-6876-4d26-8e8c-f63f3d680936-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.332456 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-sync-jpwfj" event={"ID":"cc49d3fb-6876-4d26-8e8c-f63f3d680936","Type":"ContainerDied","Data":"1f87e65f4a94499f1eea498edf20e6b654421dd2a274ebb5f5b26a3c1a26720f"} Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.332499 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f87e65f4a94499f1eea498edf20e6b654421dd2a274ebb5f5b26a3c1a26720f" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.332577 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-db-sync-jpwfj" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.553258 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/keystone-bootstrap-pchtq"] Nov 21 13:57:40 crc kubenswrapper[4721]: E1121 13:57:40.553643 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc49d3fb-6876-4d26-8e8c-f63f3d680936" containerName="keystone-db-sync" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.553663 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc49d3fb-6876-4d26-8e8c-f63f3d680936" containerName="keystone-db-sync" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.553813 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc49d3fb-6876-4d26-8e8c-f63f3d680936" containerName="keystone-db-sync" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.555005 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.558348 4721 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-scripts" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.561205 4721 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"osp-secret" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.561392 4721 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-keystone-dockercfg-kr8km" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.561537 4721 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.561722 4721 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-config-data" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.572748 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-bootstrap-pchtq"] Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.720481 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-fernet-keys\") pod \"keystone-bootstrap-pchtq\" (UID: \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\") " pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.720653 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-scripts\") pod \"keystone-bootstrap-pchtq\" (UID: \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\") " pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.720697 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-credential-keys\") pod \"keystone-bootstrap-pchtq\" (UID: \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\") " pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.720711 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mltr\" (UniqueName: 
\"kubernetes.io/projected/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-kube-api-access-9mltr\") pod \"keystone-bootstrap-pchtq\" (UID: \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\") " pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.720812 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-config-data\") pod \"keystone-bootstrap-pchtq\" (UID: \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\") " pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.822020 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-scripts\") pod \"keystone-bootstrap-pchtq\" (UID: \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\") " pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.822128 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-credential-keys\") pod \"keystone-bootstrap-pchtq\" (UID: \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\") " pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.822161 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mltr\" (UniqueName: \"kubernetes.io/projected/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-kube-api-access-9mltr\") pod \"keystone-bootstrap-pchtq\" (UID: \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\") " pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.822208 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-config-data\") pod \"keystone-bootstrap-pchtq\" (UID: \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\") " pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.822273 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-fernet-keys\") pod \"keystone-bootstrap-pchtq\" (UID: \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\") " pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.829163 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-fernet-keys\") pod \"keystone-bootstrap-pchtq\" (UID: \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\") " pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.829540 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-credential-keys\") pod \"keystone-bootstrap-pchtq\" (UID: \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\") " pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.829375 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-scripts\") pod 
\"keystone-bootstrap-pchtq\" (UID: \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\") " pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.830447 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-config-data\") pod \"keystone-bootstrap-pchtq\" (UID: \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\") " pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.846408 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mltr\" (UniqueName: \"kubernetes.io/projected/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-kube-api-access-9mltr\") pod \"keystone-bootstrap-pchtq\" (UID: \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\") " pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" Nov 21 13:57:40 crc kubenswrapper[4721]: I1121 13:57:40.886576 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" Nov 21 13:57:41 crc kubenswrapper[4721]: I1121 13:57:41.378051 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-bootstrap-pchtq"] Nov 21 13:57:42 crc kubenswrapper[4721]: I1121 13:57:42.362351 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" event={"ID":"6d09526c-0b2c-43e7-94f1-8ec539bc5f34","Type":"ContainerStarted","Data":"61de2cc920aaf2d85c9ec1764233cc6abaefb7e7a1752dc1149f7c60c6e01ffe"} Nov 21 13:57:42 crc kubenswrapper[4721]: I1121 13:57:42.362989 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" event={"ID":"6d09526c-0b2c-43e7-94f1-8ec539bc5f34","Type":"ContainerStarted","Data":"08a40a5a1b43457a992222cad450a09233358c731b4b1203a6aca37ec77cab7f"} Nov 21 13:57:42 crc kubenswrapper[4721]: I1121 13:57:42.378198 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" podStartSLOduration=2.378179819 podStartE2EDuration="2.378179819s" podCreationTimestamp="2025-11-21 13:57:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:57:42.376615846 +0000 UTC m=+1300.354507114" watchObservedRunningTime="2025-11-21 13:57:42.378179819 +0000 UTC m=+1300.356071077" Nov 21 13:57:44 crc kubenswrapper[4721]: I1121 13:57:44.123665 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67"] Nov 21 13:57:44 crc kubenswrapper[4721]: I1121 13:57:44.125487 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67" Nov 21 13:57:44 crc kubenswrapper[4721]: I1121 13:57:44.127551 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-tw86c" Nov 21 13:57:44 crc kubenswrapper[4721]: I1121 13:57:44.141288 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67"] Nov 21 13:57:44 crc kubenswrapper[4721]: I1121 13:57:44.283646 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cd6fl\" (UniqueName: \"kubernetes.io/projected/3db95ec9-54df-4d1b-9460-a17d596a62ad-kube-api-access-cd6fl\") pod \"c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67\" (UID: \"3db95ec9-54df-4d1b-9460-a17d596a62ad\") " pod="openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67" Nov 21 13:57:44 crc kubenswrapper[4721]: I1121 13:57:44.283694 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3db95ec9-54df-4d1b-9460-a17d596a62ad-bundle\") pod \"c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67\" (UID: \"3db95ec9-54df-4d1b-9460-a17d596a62ad\") " pod="openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67" Nov 21 13:57:44 crc kubenswrapper[4721]: I1121 13:57:44.283729 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3db95ec9-54df-4d1b-9460-a17d596a62ad-util\") pod \"c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67\" (UID: \"3db95ec9-54df-4d1b-9460-a17d596a62ad\") " pod="openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67" Nov 21 13:57:44 crc kubenswrapper[4721]: I1121 13:57:44.384846 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cd6fl\" (UniqueName: \"kubernetes.io/projected/3db95ec9-54df-4d1b-9460-a17d596a62ad-kube-api-access-cd6fl\") pod \"c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67\" (UID: \"3db95ec9-54df-4d1b-9460-a17d596a62ad\") " pod="openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67" Nov 21 13:57:44 crc kubenswrapper[4721]: I1121 13:57:44.384893 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3db95ec9-54df-4d1b-9460-a17d596a62ad-bundle\") pod \"c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67\" (UID: \"3db95ec9-54df-4d1b-9460-a17d596a62ad\") " pod="openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67" Nov 21 13:57:44 crc kubenswrapper[4721]: I1121 13:57:44.384928 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3db95ec9-54df-4d1b-9460-a17d596a62ad-util\") pod \"c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67\" (UID: \"3db95ec9-54df-4d1b-9460-a17d596a62ad\") " pod="openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67" Nov 21 13:57:44 crc kubenswrapper[4721]: I1121 13:57:44.385419 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/3db95ec9-54df-4d1b-9460-a17d596a62ad-util\") pod \"c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67\" (UID: \"3db95ec9-54df-4d1b-9460-a17d596a62ad\") " pod="openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67" Nov 21 13:57:44 crc kubenswrapper[4721]: I1121 13:57:44.385514 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3db95ec9-54df-4d1b-9460-a17d596a62ad-bundle\") pod \"c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67\" (UID: \"3db95ec9-54df-4d1b-9460-a17d596a62ad\") " pod="openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67" Nov 21 13:57:44 crc kubenswrapper[4721]: I1121 13:57:44.402453 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cd6fl\" (UniqueName: \"kubernetes.io/projected/3db95ec9-54df-4d1b-9460-a17d596a62ad-kube-api-access-cd6fl\") pod \"c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67\" (UID: \"3db95ec9-54df-4d1b-9460-a17d596a62ad\") " pod="openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67" Nov 21 13:57:44 crc kubenswrapper[4721]: I1121 13:57:44.444435 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67" Nov 21 13:57:44 crc kubenswrapper[4721]: I1121 13:57:44.687568 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67"] Nov 21 13:57:44 crc kubenswrapper[4721]: W1121 13:57:44.697557 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3db95ec9_54df_4d1b_9460_a17d596a62ad.slice/crio-8708b47ab4e3f69096aa9dce405cc7f6657fab2ffedcee3a53769dee5f4faa07 WatchSource:0}: Error finding container 8708b47ab4e3f69096aa9dce405cc7f6657fab2ffedcee3a53769dee5f4faa07: Status 404 returned error can't find the container with id 8708b47ab4e3f69096aa9dce405cc7f6657fab2ffedcee3a53769dee5f4faa07 Nov 21 13:57:45 crc kubenswrapper[4721]: I1121 13:57:45.385418 4721 generic.go:334] "Generic (PLEG): container finished" podID="6d09526c-0b2c-43e7-94f1-8ec539bc5f34" containerID="61de2cc920aaf2d85c9ec1764233cc6abaefb7e7a1752dc1149f7c60c6e01ffe" exitCode=0 Nov 21 13:57:45 crc kubenswrapper[4721]: I1121 13:57:45.385554 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" event={"ID":"6d09526c-0b2c-43e7-94f1-8ec539bc5f34","Type":"ContainerDied","Data":"61de2cc920aaf2d85c9ec1764233cc6abaefb7e7a1752dc1149f7c60c6e01ffe"} Nov 21 13:57:45 crc kubenswrapper[4721]: I1121 13:57:45.388708 4721 generic.go:334] "Generic (PLEG): container finished" podID="3db95ec9-54df-4d1b-9460-a17d596a62ad" containerID="754daf51c2a42f05ea3ab65b60bdca1a8fd5633f78ab4dc5e7ba3760fc2fb862" exitCode=0 Nov 21 13:57:45 crc kubenswrapper[4721]: I1121 13:57:45.388745 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67" event={"ID":"3db95ec9-54df-4d1b-9460-a17d596a62ad","Type":"ContainerDied","Data":"754daf51c2a42f05ea3ab65b60bdca1a8fd5633f78ab4dc5e7ba3760fc2fb862"} Nov 21 13:57:45 crc kubenswrapper[4721]: I1121 13:57:45.388768 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67" event={"ID":"3db95ec9-54df-4d1b-9460-a17d596a62ad","Type":"ContainerStarted","Data":"8708b47ab4e3f69096aa9dce405cc7f6657fab2ffedcee3a53769dee5f4faa07"} Nov 21 13:57:46 crc kubenswrapper[4721]: I1121 13:57:46.643235 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" Nov 21 13:57:46 crc kubenswrapper[4721]: I1121 13:57:46.818634 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-scripts\") pod \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\" (UID: \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\") " Nov 21 13:57:46 crc kubenswrapper[4721]: I1121 13:57:46.818728 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-fernet-keys\") pod \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\" (UID: \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\") " Nov 21 13:57:46 crc kubenswrapper[4721]: I1121 13:57:46.818781 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-config-data\") pod \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\" (UID: \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\") " Nov 21 13:57:46 crc kubenswrapper[4721]: I1121 13:57:46.818836 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mltr\" (UniqueName: \"kubernetes.io/projected/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-kube-api-access-9mltr\") pod \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\" (UID: \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\") " Nov 21 13:57:46 crc kubenswrapper[4721]: I1121 13:57:46.818885 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-credential-keys\") pod \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\" (UID: \"6d09526c-0b2c-43e7-94f1-8ec539bc5f34\") " Nov 21 13:57:46 crc kubenswrapper[4721]: I1121 13:57:46.824244 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-kube-api-access-9mltr" (OuterVolumeSpecName: "kube-api-access-9mltr") pod "6d09526c-0b2c-43e7-94f1-8ec539bc5f34" (UID: "6d09526c-0b2c-43e7-94f1-8ec539bc5f34"). InnerVolumeSpecName "kube-api-access-9mltr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:57:46 crc kubenswrapper[4721]: I1121 13:57:46.824292 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-scripts" (OuterVolumeSpecName: "scripts") pod "6d09526c-0b2c-43e7-94f1-8ec539bc5f34" (UID: "6d09526c-0b2c-43e7-94f1-8ec539bc5f34"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:57:46 crc kubenswrapper[4721]: I1121 13:57:46.824698 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "6d09526c-0b2c-43e7-94f1-8ec539bc5f34" (UID: "6d09526c-0b2c-43e7-94f1-8ec539bc5f34"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:57:46 crc kubenswrapper[4721]: I1121 13:57:46.827328 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "6d09526c-0b2c-43e7-94f1-8ec539bc5f34" (UID: "6d09526c-0b2c-43e7-94f1-8ec539bc5f34"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:57:46 crc kubenswrapper[4721]: I1121 13:57:46.846355 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-config-data" (OuterVolumeSpecName: "config-data") pod "6d09526c-0b2c-43e7-94f1-8ec539bc5f34" (UID: "6d09526c-0b2c-43e7-94f1-8ec539bc5f34"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 13:57:46 crc kubenswrapper[4721]: I1121 13:57:46.920716 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 13:57:46 crc kubenswrapper[4721]: I1121 13:57:46.920758 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mltr\" (UniqueName: \"kubernetes.io/projected/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-kube-api-access-9mltr\") on node \"crc\" DevicePath \"\"" Nov 21 13:57:46 crc kubenswrapper[4721]: I1121 13:57:46.920769 4721 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 21 13:57:46 crc kubenswrapper[4721]: I1121 13:57:46.920778 4721 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 13:57:46 crc kubenswrapper[4721]: I1121 13:57:46.920786 4721 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6d09526c-0b2c-43e7-94f1-8ec539bc5f34-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.403099 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.403129 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-bootstrap-pchtq" event={"ID":"6d09526c-0b2c-43e7-94f1-8ec539bc5f34","Type":"ContainerDied","Data":"08a40a5a1b43457a992222cad450a09233358c731b4b1203a6aca37ec77cab7f"} Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.403668 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08a40a5a1b43457a992222cad450a09233358c731b4b1203a6aca37ec77cab7f" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.405922 4721 generic.go:334] "Generic (PLEG): container finished" podID="3db95ec9-54df-4d1b-9460-a17d596a62ad" containerID="9317c3740e628f9aac28d7c80ac52bde31b062ab3fe0d80d31a75edaafdcdf2d" exitCode=0 Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.405960 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67" event={"ID":"3db95ec9-54df-4d1b-9460-a17d596a62ad","Type":"ContainerDied","Data":"9317c3740e628f9aac28d7c80ac52bde31b062ab3fe0d80d31a75edaafdcdf2d"} Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.489153 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/keystone-8fd888fc4-zgr57"] Nov 21 13:57:47 crc kubenswrapper[4721]: E1121 13:57:47.489387 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d09526c-0b2c-43e7-94f1-8ec539bc5f34" containerName="keystone-bootstrap" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.489403 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d09526c-0b2c-43e7-94f1-8ec539bc5f34" containerName="keystone-bootstrap" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.489515 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d09526c-0b2c-43e7-94f1-8ec539bc5f34" containerName="keystone-bootstrap" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.489899 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.494941 4721 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-scripts" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.495307 4721 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.495496 4721 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-config-data" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.495658 4721 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-keystone-dockercfg-kr8km" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.510287 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-8fd888fc4-zgr57"] Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.633153 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2f143660-bade-4c16-ad3a-a7ab1f2c3283-fernet-keys\") pod \"keystone-8fd888fc4-zgr57\" (UID: \"2f143660-bade-4c16-ad3a-a7ab1f2c3283\") " pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.633198 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2f143660-bade-4c16-ad3a-a7ab1f2c3283-credential-keys\") pod \"keystone-8fd888fc4-zgr57\" (UID: \"2f143660-bade-4c16-ad3a-a7ab1f2c3283\") " pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.633227 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f143660-bade-4c16-ad3a-a7ab1f2c3283-config-data\") pod \"keystone-8fd888fc4-zgr57\" (UID: \"2f143660-bade-4c16-ad3a-a7ab1f2c3283\") " pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.633307 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znc8c\" (UniqueName: \"kubernetes.io/projected/2f143660-bade-4c16-ad3a-a7ab1f2c3283-kube-api-access-znc8c\") pod \"keystone-8fd888fc4-zgr57\" (UID: \"2f143660-bade-4c16-ad3a-a7ab1f2c3283\") " pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.633494 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2f143660-bade-4c16-ad3a-a7ab1f2c3283-scripts\") pod \"keystone-8fd888fc4-zgr57\" (UID: \"2f143660-bade-4c16-ad3a-a7ab1f2c3283\") " pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.734531 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2f143660-bade-4c16-ad3a-a7ab1f2c3283-fernet-keys\") pod \"keystone-8fd888fc4-zgr57\" (UID: \"2f143660-bade-4c16-ad3a-a7ab1f2c3283\") " pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.734582 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/2f143660-bade-4c16-ad3a-a7ab1f2c3283-credential-keys\") pod \"keystone-8fd888fc4-zgr57\" (UID: \"2f143660-bade-4c16-ad3a-a7ab1f2c3283\") " pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.734610 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f143660-bade-4c16-ad3a-a7ab1f2c3283-config-data\") pod \"keystone-8fd888fc4-zgr57\" (UID: \"2f143660-bade-4c16-ad3a-a7ab1f2c3283\") " pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.734632 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znc8c\" (UniqueName: \"kubernetes.io/projected/2f143660-bade-4c16-ad3a-a7ab1f2c3283-kube-api-access-znc8c\") pod \"keystone-8fd888fc4-zgr57\" (UID: \"2f143660-bade-4c16-ad3a-a7ab1f2c3283\") " pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.734673 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2f143660-bade-4c16-ad3a-a7ab1f2c3283-scripts\") pod \"keystone-8fd888fc4-zgr57\" (UID: \"2f143660-bade-4c16-ad3a-a7ab1f2c3283\") " pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.739793 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2f143660-bade-4c16-ad3a-a7ab1f2c3283-fernet-keys\") pod \"keystone-8fd888fc4-zgr57\" (UID: \"2f143660-bade-4c16-ad3a-a7ab1f2c3283\") " pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.740549 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2f143660-bade-4c16-ad3a-a7ab1f2c3283-credential-keys\") pod \"keystone-8fd888fc4-zgr57\" (UID: \"2f143660-bade-4c16-ad3a-a7ab1f2c3283\") " pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.741040 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f143660-bade-4c16-ad3a-a7ab1f2c3283-config-data\") pod \"keystone-8fd888fc4-zgr57\" (UID: \"2f143660-bade-4c16-ad3a-a7ab1f2c3283\") " pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.741431 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2f143660-bade-4c16-ad3a-a7ab1f2c3283-scripts\") pod \"keystone-8fd888fc4-zgr57\" (UID: \"2f143660-bade-4c16-ad3a-a7ab1f2c3283\") " pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.751276 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znc8c\" (UniqueName: \"kubernetes.io/projected/2f143660-bade-4c16-ad3a-a7ab1f2c3283-kube-api-access-znc8c\") pod \"keystone-8fd888fc4-zgr57\" (UID: \"2f143660-bade-4c16-ad3a-a7ab1f2c3283\") " pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" Nov 21 13:57:47 crc kubenswrapper[4721]: I1121 13:57:47.864440 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" Nov 21 13:57:48 crc kubenswrapper[4721]: I1121 13:57:48.310099 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-8fd888fc4-zgr57"] Nov 21 13:57:48 crc kubenswrapper[4721]: W1121 13:57:48.322149 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f143660_bade_4c16_ad3a_a7ab1f2c3283.slice/crio-aeb9fc6cbf2ea0bb6c3a2dc0c63ea3ded33396df0e1458e744a3ecf65e616ae7 WatchSource:0}: Error finding container aeb9fc6cbf2ea0bb6c3a2dc0c63ea3ded33396df0e1458e744a3ecf65e616ae7: Status 404 returned error can't find the container with id aeb9fc6cbf2ea0bb6c3a2dc0c63ea3ded33396df0e1458e744a3ecf65e616ae7 Nov 21 13:57:48 crc kubenswrapper[4721]: I1121 13:57:48.414558 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" event={"ID":"2f143660-bade-4c16-ad3a-a7ab1f2c3283","Type":"ContainerStarted","Data":"aeb9fc6cbf2ea0bb6c3a2dc0c63ea3ded33396df0e1458e744a3ecf65e616ae7"} Nov 21 13:57:48 crc kubenswrapper[4721]: I1121 13:57:48.418724 4721 generic.go:334] "Generic (PLEG): container finished" podID="3db95ec9-54df-4d1b-9460-a17d596a62ad" containerID="29f03b91b851168d52169a54c6aa060e93f3d7f177fe29ed59b4f17bda27d419" exitCode=0 Nov 21 13:57:48 crc kubenswrapper[4721]: I1121 13:57:48.418760 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67" event={"ID":"3db95ec9-54df-4d1b-9460-a17d596a62ad","Type":"ContainerDied","Data":"29f03b91b851168d52169a54c6aa060e93f3d7f177fe29ed59b4f17bda27d419"} Nov 21 13:57:49 crc kubenswrapper[4721]: I1121 13:57:49.427399 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" event={"ID":"2f143660-bade-4c16-ad3a-a7ab1f2c3283","Type":"ContainerStarted","Data":"48d97144439e97b73f3b5705c6da860b47af5b6c1d06b96cc0b5b24943d60233"} Nov 21 13:57:49 crc kubenswrapper[4721]: I1121 13:57:49.454574 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" podStartSLOduration=2.454539678 podStartE2EDuration="2.454539678s" podCreationTimestamp="2025-11-21 13:57:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 13:57:49.450391055 +0000 UTC m=+1307.428282313" watchObservedRunningTime="2025-11-21 13:57:49.454539678 +0000 UTC m=+1307.432430966" Nov 21 13:57:49 crc kubenswrapper[4721]: I1121 13:57:49.692061 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67" Nov 21 13:57:49 crc kubenswrapper[4721]: I1121 13:57:49.868437 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cd6fl\" (UniqueName: \"kubernetes.io/projected/3db95ec9-54df-4d1b-9460-a17d596a62ad-kube-api-access-cd6fl\") pod \"3db95ec9-54df-4d1b-9460-a17d596a62ad\" (UID: \"3db95ec9-54df-4d1b-9460-a17d596a62ad\") " Nov 21 13:57:49 crc kubenswrapper[4721]: I1121 13:57:49.869043 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3db95ec9-54df-4d1b-9460-a17d596a62ad-util\") pod \"3db95ec9-54df-4d1b-9460-a17d596a62ad\" (UID: \"3db95ec9-54df-4d1b-9460-a17d596a62ad\") " Nov 21 13:57:49 crc kubenswrapper[4721]: I1121 13:57:49.869120 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3db95ec9-54df-4d1b-9460-a17d596a62ad-bundle\") pod \"3db95ec9-54df-4d1b-9460-a17d596a62ad\" (UID: \"3db95ec9-54df-4d1b-9460-a17d596a62ad\") " Nov 21 13:57:49 crc kubenswrapper[4721]: I1121 13:57:49.870124 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3db95ec9-54df-4d1b-9460-a17d596a62ad-bundle" (OuterVolumeSpecName: "bundle") pod "3db95ec9-54df-4d1b-9460-a17d596a62ad" (UID: "3db95ec9-54df-4d1b-9460-a17d596a62ad"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:57:49 crc kubenswrapper[4721]: I1121 13:57:49.874027 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3db95ec9-54df-4d1b-9460-a17d596a62ad-kube-api-access-cd6fl" (OuterVolumeSpecName: "kube-api-access-cd6fl") pod "3db95ec9-54df-4d1b-9460-a17d596a62ad" (UID: "3db95ec9-54df-4d1b-9460-a17d596a62ad"). InnerVolumeSpecName "kube-api-access-cd6fl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 13:57:49 crc kubenswrapper[4721]: I1121 13:57:49.905287 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3db95ec9-54df-4d1b-9460-a17d596a62ad-util" (OuterVolumeSpecName: "util") pod "3db95ec9-54df-4d1b-9460-a17d596a62ad" (UID: "3db95ec9-54df-4d1b-9460-a17d596a62ad"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 13:57:49 crc kubenswrapper[4721]: I1121 13:57:49.970954 4721 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3db95ec9-54df-4d1b-9460-a17d596a62ad-util\") on node \"crc\" DevicePath \"\"" Nov 21 13:57:49 crc kubenswrapper[4721]: I1121 13:57:49.971000 4721 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3db95ec9-54df-4d1b-9460-a17d596a62ad-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 13:57:49 crc kubenswrapper[4721]: I1121 13:57:49.971013 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cd6fl\" (UniqueName: \"kubernetes.io/projected/3db95ec9-54df-4d1b-9460-a17d596a62ad-kube-api-access-cd6fl\") on node \"crc\" DevicePath \"\"" Nov 21 13:57:50 crc kubenswrapper[4721]: I1121 13:57:50.440134 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67" event={"ID":"3db95ec9-54df-4d1b-9460-a17d596a62ad","Type":"ContainerDied","Data":"8708b47ab4e3f69096aa9dce405cc7f6657fab2ffedcee3a53769dee5f4faa07"} Nov 21 13:57:50 crc kubenswrapper[4721]: I1121 13:57:50.440206 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67" Nov 21 13:57:50 crc kubenswrapper[4721]: I1121 13:57:50.440209 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8708b47ab4e3f69096aa9dce405cc7f6657fab2ffedcee3a53769dee5f4faa07" Nov 21 13:57:50 crc kubenswrapper[4721]: I1121 13:57:50.440628 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" Nov 21 13:58:02 crc kubenswrapper[4721]: I1121 13:58:02.850549 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7f9c89dbc-n6lwm"] Nov 21 13:58:02 crc kubenswrapper[4721]: E1121 13:58:02.851452 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3db95ec9-54df-4d1b-9460-a17d596a62ad" containerName="extract" Nov 21 13:58:02 crc kubenswrapper[4721]: I1121 13:58:02.851471 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="3db95ec9-54df-4d1b-9460-a17d596a62ad" containerName="extract" Nov 21 13:58:02 crc kubenswrapper[4721]: E1121 13:58:02.851487 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3db95ec9-54df-4d1b-9460-a17d596a62ad" containerName="pull" Nov 21 13:58:02 crc kubenswrapper[4721]: I1121 13:58:02.851495 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="3db95ec9-54df-4d1b-9460-a17d596a62ad" containerName="pull" Nov 21 13:58:02 crc kubenswrapper[4721]: E1121 13:58:02.851521 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3db95ec9-54df-4d1b-9460-a17d596a62ad" containerName="util" Nov 21 13:58:02 crc kubenswrapper[4721]: I1121 13:58:02.851530 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="3db95ec9-54df-4d1b-9460-a17d596a62ad" containerName="util" Nov 21 13:58:02 crc kubenswrapper[4721]: I1121 13:58:02.851662 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="3db95ec9-54df-4d1b-9460-a17d596a62ad" containerName="extract" Nov 21 13:58:02 crc kubenswrapper[4721]: I1121 13:58:02.852298 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-7f9c89dbc-n6lwm" Nov 21 13:58:02 crc kubenswrapper[4721]: I1121 13:58:02.856810 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-service-cert" Nov 21 13:58:02 crc kubenswrapper[4721]: I1121 13:58:02.856888 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-ng7wl" Nov 21 13:58:02 crc kubenswrapper[4721]: I1121 13:58:02.861988 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7f9c89dbc-n6lwm"] Nov 21 13:58:02 crc kubenswrapper[4721]: I1121 13:58:02.954430 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ec9a4dcc-2e70-40e3-a61e-7d8fff27c092-webhook-cert\") pod \"horizon-operator-controller-manager-7f9c89dbc-n6lwm\" (UID: \"ec9a4dcc-2e70-40e3-a61e-7d8fff27c092\") " pod="openstack-operators/horizon-operator-controller-manager-7f9c89dbc-n6lwm" Nov 21 13:58:02 crc kubenswrapper[4721]: I1121 13:58:02.954675 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ec9a4dcc-2e70-40e3-a61e-7d8fff27c092-apiservice-cert\") pod \"horizon-operator-controller-manager-7f9c89dbc-n6lwm\" (UID: \"ec9a4dcc-2e70-40e3-a61e-7d8fff27c092\") " pod="openstack-operators/horizon-operator-controller-manager-7f9c89dbc-n6lwm" Nov 21 13:58:02 crc kubenswrapper[4721]: I1121 13:58:02.954800 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xrjg\" (UniqueName: \"kubernetes.io/projected/ec9a4dcc-2e70-40e3-a61e-7d8fff27c092-kube-api-access-9xrjg\") pod \"horizon-operator-controller-manager-7f9c89dbc-n6lwm\" (UID: \"ec9a4dcc-2e70-40e3-a61e-7d8fff27c092\") " pod="openstack-operators/horizon-operator-controller-manager-7f9c89dbc-n6lwm" Nov 21 13:58:03 crc kubenswrapper[4721]: I1121 13:58:03.057575 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ec9a4dcc-2e70-40e3-a61e-7d8fff27c092-webhook-cert\") pod \"horizon-operator-controller-manager-7f9c89dbc-n6lwm\" (UID: \"ec9a4dcc-2e70-40e3-a61e-7d8fff27c092\") " pod="openstack-operators/horizon-operator-controller-manager-7f9c89dbc-n6lwm" Nov 21 13:58:03 crc kubenswrapper[4721]: I1121 13:58:03.057637 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ec9a4dcc-2e70-40e3-a61e-7d8fff27c092-apiservice-cert\") pod \"horizon-operator-controller-manager-7f9c89dbc-n6lwm\" (UID: \"ec9a4dcc-2e70-40e3-a61e-7d8fff27c092\") " pod="openstack-operators/horizon-operator-controller-manager-7f9c89dbc-n6lwm" Nov 21 13:58:03 crc kubenswrapper[4721]: I1121 13:58:03.057677 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xrjg\" (UniqueName: \"kubernetes.io/projected/ec9a4dcc-2e70-40e3-a61e-7d8fff27c092-kube-api-access-9xrjg\") pod \"horizon-operator-controller-manager-7f9c89dbc-n6lwm\" (UID: \"ec9a4dcc-2e70-40e3-a61e-7d8fff27c092\") " pod="openstack-operators/horizon-operator-controller-manager-7f9c89dbc-n6lwm" Nov 21 13:58:03 crc kubenswrapper[4721]: I1121 13:58:03.059817 4721 reflector.go:368] Caches populated for 
*v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-service-cert" Nov 21 13:58:03 crc kubenswrapper[4721]: I1121 13:58:03.078151 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ec9a4dcc-2e70-40e3-a61e-7d8fff27c092-webhook-cert\") pod \"horizon-operator-controller-manager-7f9c89dbc-n6lwm\" (UID: \"ec9a4dcc-2e70-40e3-a61e-7d8fff27c092\") " pod="openstack-operators/horizon-operator-controller-manager-7f9c89dbc-n6lwm" Nov 21 13:58:03 crc kubenswrapper[4721]: I1121 13:58:03.079452 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ec9a4dcc-2e70-40e3-a61e-7d8fff27c092-apiservice-cert\") pod \"horizon-operator-controller-manager-7f9c89dbc-n6lwm\" (UID: \"ec9a4dcc-2e70-40e3-a61e-7d8fff27c092\") " pod="openstack-operators/horizon-operator-controller-manager-7f9c89dbc-n6lwm" Nov 21 13:58:03 crc kubenswrapper[4721]: I1121 13:58:03.080239 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xrjg\" (UniqueName: \"kubernetes.io/projected/ec9a4dcc-2e70-40e3-a61e-7d8fff27c092-kube-api-access-9xrjg\") pod \"horizon-operator-controller-manager-7f9c89dbc-n6lwm\" (UID: \"ec9a4dcc-2e70-40e3-a61e-7d8fff27c092\") " pod="openstack-operators/horizon-operator-controller-manager-7f9c89dbc-n6lwm" Nov 21 13:58:03 crc kubenswrapper[4721]: I1121 13:58:03.178487 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-ng7wl" Nov 21 13:58:03 crc kubenswrapper[4721]: I1121 13:58:03.187007 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-7f9c89dbc-n6lwm" Nov 21 13:58:03 crc kubenswrapper[4721]: I1121 13:58:03.682183 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7f9c89dbc-n6lwm"] Nov 21 13:58:03 crc kubenswrapper[4721]: W1121 13:58:03.689186 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podec9a4dcc_2e70_40e3_a61e_7d8fff27c092.slice/crio-101e7119c9504effbee42a773bcd53e81063caee17c3165cc48a3c7b933b3b5c WatchSource:0}: Error finding container 101e7119c9504effbee42a773bcd53e81063caee17c3165cc48a3c7b933b3b5c: Status 404 returned error can't find the container with id 101e7119c9504effbee42a773bcd53e81063caee17c3165cc48a3c7b933b3b5c Nov 21 13:58:04 crc kubenswrapper[4721]: I1121 13:58:04.541625 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7f9c89dbc-n6lwm" event={"ID":"ec9a4dcc-2e70-40e3-a61e-7d8fff27c092","Type":"ContainerStarted","Data":"101e7119c9504effbee42a773bcd53e81063caee17c3165cc48a3c7b933b3b5c"} Nov 21 13:58:05 crc kubenswrapper[4721]: I1121 13:58:05.262199 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 13:58:05 crc kubenswrapper[4721]: I1121 13:58:05.262477 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 13:58:07 crc kubenswrapper[4721]: I1121 13:58:07.562425 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7f9c89dbc-n6lwm" event={"ID":"ec9a4dcc-2e70-40e3-a61e-7d8fff27c092","Type":"ContainerStarted","Data":"03176a89266314a6240e98b88a9769027fda239fd7524dede419bcc7030d5201"} Nov 21 13:58:07 crc kubenswrapper[4721]: I1121 13:58:07.562993 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-7f9c89dbc-n6lwm" Nov 21 13:58:07 crc kubenswrapper[4721]: I1121 13:58:07.586434 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-7f9c89dbc-n6lwm" podStartSLOduration=2.3457667349999998 podStartE2EDuration="5.586398203s" podCreationTimestamp="2025-11-21 13:58:02 +0000 UTC" firstStartedPulling="2025-11-21 13:58:03.691635424 +0000 UTC m=+1321.669526682" lastFinishedPulling="2025-11-21 13:58:06.932266892 +0000 UTC m=+1324.910158150" observedRunningTime="2025-11-21 13:58:07.580089621 +0000 UTC m=+1325.557980899" watchObservedRunningTime="2025-11-21 13:58:07.586398203 +0000 UTC m=+1325.564289501" Nov 21 13:58:13 crc kubenswrapper[4721]: I1121 13:58:13.191741 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-7f9c89dbc-n6lwm" Nov 21 13:58:19 crc kubenswrapper[4721]: I1121 13:58:19.355211 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/keystone-8fd888fc4-zgr57" Nov 21 13:58:35 crc kubenswrapper[4721]: I1121 13:58:35.262516 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 13:58:35 crc kubenswrapper[4721]: I1121 13:58:35.263384 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 13:58:40 crc kubenswrapper[4721]: I1121 13:58:40.273546 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-5v8sx/must-gather-44tfc"] Nov 21 13:58:40 crc kubenswrapper[4721]: I1121 13:58:40.274670 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-5v8sx/must-gather-44tfc" Nov 21 13:58:40 crc kubenswrapper[4721]: I1121 13:58:40.276074 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-5v8sx"/"kube-root-ca.crt" Nov 21 13:58:40 crc kubenswrapper[4721]: I1121 13:58:40.276214 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-5v8sx"/"openshift-service-ca.crt" Nov 21 13:58:40 crc kubenswrapper[4721]: I1121 13:58:40.370495 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-5v8sx/must-gather-44tfc"] Nov 21 13:58:40 crc kubenswrapper[4721]: I1121 13:58:40.432995 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1370b67c-066a-43da-933e-fff17d0b0fae-must-gather-output\") pod \"must-gather-44tfc\" (UID: \"1370b67c-066a-43da-933e-fff17d0b0fae\") " pod="openshift-must-gather-5v8sx/must-gather-44tfc" Nov 21 13:58:40 crc kubenswrapper[4721]: I1121 13:58:40.433380 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlf7j\" (UniqueName: \"kubernetes.io/projected/1370b67c-066a-43da-933e-fff17d0b0fae-kube-api-access-jlf7j\") pod \"must-gather-44tfc\" (UID: \"1370b67c-066a-43da-933e-fff17d0b0fae\") " pod="openshift-must-gather-5v8sx/must-gather-44tfc" Nov 21 13:58:40 crc kubenswrapper[4721]: I1121 13:58:40.535343 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1370b67c-066a-43da-933e-fff17d0b0fae-must-gather-output\") pod \"must-gather-44tfc\" (UID: \"1370b67c-066a-43da-933e-fff17d0b0fae\") " pod="openshift-must-gather-5v8sx/must-gather-44tfc" Nov 21 13:58:40 crc kubenswrapper[4721]: I1121 13:58:40.535704 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlf7j\" (UniqueName: \"kubernetes.io/projected/1370b67c-066a-43da-933e-fff17d0b0fae-kube-api-access-jlf7j\") pod \"must-gather-44tfc\" (UID: \"1370b67c-066a-43da-933e-fff17d0b0fae\") " pod="openshift-must-gather-5v8sx/must-gather-44tfc" Nov 21 13:58:40 crc kubenswrapper[4721]: I1121 13:58:40.536194 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1370b67c-066a-43da-933e-fff17d0b0fae-must-gather-output\") pod \"must-gather-44tfc\" (UID: \"1370b67c-066a-43da-933e-fff17d0b0fae\") " pod="openshift-must-gather-5v8sx/must-gather-44tfc" Nov 21 13:58:40 crc kubenswrapper[4721]: I1121 13:58:40.561822 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlf7j\" (UniqueName: \"kubernetes.io/projected/1370b67c-066a-43da-933e-fff17d0b0fae-kube-api-access-jlf7j\") pod \"must-gather-44tfc\" (UID: \"1370b67c-066a-43da-933e-fff17d0b0fae\") " pod="openshift-must-gather-5v8sx/must-gather-44tfc" Nov 21 13:58:40 crc kubenswrapper[4721]: I1121 13:58:40.593279 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-5v8sx/must-gather-44tfc" Nov 21 13:58:41 crc kubenswrapper[4721]: I1121 13:58:41.010797 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-5v8sx/must-gather-44tfc"] Nov 21 13:58:41 crc kubenswrapper[4721]: I1121 13:58:41.810917 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5v8sx/must-gather-44tfc" event={"ID":"1370b67c-066a-43da-933e-fff17d0b0fae","Type":"ContainerStarted","Data":"0d44fb27e40d1775a993a59382b90ab84fd96fea608db7c1586689200a78a7c3"} Nov 21 13:58:50 crc kubenswrapper[4721]: I1121 13:58:50.881405 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5v8sx/must-gather-44tfc" event={"ID":"1370b67c-066a-43da-933e-fff17d0b0fae","Type":"ContainerStarted","Data":"6845afb1ecd60cee4b1e9160d8a82a663e0901422b0ec00fde81bd132bc4b2c7"} Nov 21 13:58:51 crc kubenswrapper[4721]: I1121 13:58:51.891768 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5v8sx/must-gather-44tfc" event={"ID":"1370b67c-066a-43da-933e-fff17d0b0fae","Type":"ContainerStarted","Data":"9121dde909cda789cabe1521fa465474140029d7f354019254e4d27f4614e4f7"} Nov 21 13:58:51 crc kubenswrapper[4721]: I1121 13:58:51.907564 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-5v8sx/must-gather-44tfc" podStartSLOduration=2.312809283 podStartE2EDuration="11.907544894s" podCreationTimestamp="2025-11-21 13:58:40 +0000 UTC" firstStartedPulling="2025-11-21 13:58:41.021738358 +0000 UTC m=+1358.999629626" lastFinishedPulling="2025-11-21 13:58:50.616473979 +0000 UTC m=+1368.594365237" observedRunningTime="2025-11-21 13:58:51.902907638 +0000 UTC m=+1369.880798896" watchObservedRunningTime="2025-11-21 13:58:51.907544894 +0000 UTC m=+1369.885436152" Nov 21 13:59:05 crc kubenswrapper[4721]: I1121 13:59:05.261966 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 13:59:05 crc kubenswrapper[4721]: I1121 13:59:05.262641 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 13:59:05 crc kubenswrapper[4721]: I1121 13:59:05.262696 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" Nov 21 13:59:05 crc kubenswrapper[4721]: I1121 13:59:05.263420 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a1c015fb42138002e1bbf6657d93a4f584890b9fe82b0b256353c923999d7d76"} pod="openshift-machine-config-operator/machine-config-daemon-mstbk" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 13:59:05 crc kubenswrapper[4721]: I1121 13:59:05.263496 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" 
containerID="cri-o://a1c015fb42138002e1bbf6657d93a4f584890b9fe82b0b256353c923999d7d76" gracePeriod=600 Nov 21 13:59:05 crc kubenswrapper[4721]: I1121 13:59:05.985242 4721 generic.go:334] "Generic (PLEG): container finished" podID="fe3943c5-1349-4275-bc45-25db418c7488" containerID="a1c015fb42138002e1bbf6657d93a4f584890b9fe82b0b256353c923999d7d76" exitCode=0 Nov 21 13:59:05 crc kubenswrapper[4721]: I1121 13:59:05.985284 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" event={"ID":"fe3943c5-1349-4275-bc45-25db418c7488","Type":"ContainerDied","Data":"a1c015fb42138002e1bbf6657d93a4f584890b9fe82b0b256353c923999d7d76"} Nov 21 13:59:05 crc kubenswrapper[4721]: I1121 13:59:05.985568 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" event={"ID":"fe3943c5-1349-4275-bc45-25db418c7488","Type":"ContainerStarted","Data":"bf613cedd13a9689bd43ccd46e14f24a5ed5a9a6aff6fa123ea297718f0efb59"} Nov 21 13:59:05 crc kubenswrapper[4721]: I1121 13:59:05.985590 4721 scope.go:117] "RemoveContainer" containerID="35f8b67009daee90d3db515f042a09a64d87fb15b884ad370362a42ff402804a" Nov 21 13:59:23 crc kubenswrapper[4721]: I1121 13:59:23.183274 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87_4316bbef-eb0e-42b5-b59a-1d4dc304aa78/util/0.log" Nov 21 13:59:23 crc kubenswrapper[4721]: I1121 13:59:23.342001 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87_4316bbef-eb0e-42b5-b59a-1d4dc304aa78/util/0.log" Nov 21 13:59:23 crc kubenswrapper[4721]: I1121 13:59:23.362348 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87_4316bbef-eb0e-42b5-b59a-1d4dc304aa78/pull/0.log" Nov 21 13:59:23 crc kubenswrapper[4721]: I1121 13:59:23.400659 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87_4316bbef-eb0e-42b5-b59a-1d4dc304aa78/pull/0.log" Nov 21 13:59:23 crc kubenswrapper[4721]: I1121 13:59:23.560022 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87_4316bbef-eb0e-42b5-b59a-1d4dc304aa78/extract/0.log" Nov 21 13:59:23 crc kubenswrapper[4721]: I1121 13:59:23.564445 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87_4316bbef-eb0e-42b5-b59a-1d4dc304aa78/util/0.log" Nov 21 13:59:23 crc kubenswrapper[4721]: I1121 13:59:23.568296 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7abe4676e9c7174a0976b528ff13527e30f787694a732dea185c78a27c2fr87_4316bbef-eb0e-42b5-b59a-1d4dc304aa78/pull/0.log" Nov 21 13:59:23 crc kubenswrapper[4721]: I1121 13:59:23.732380 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm_0fcc1f5e-b89d-488a-847d-820b59e1c53f/util/0.log" Nov 21 13:59:23 crc kubenswrapper[4721]: I1121 13:59:23.885650 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm_0fcc1f5e-b89d-488a-847d-820b59e1c53f/pull/0.log" 
Nov 21 13:59:23 crc kubenswrapper[4721]: I1121 13:59:23.885990 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm_0fcc1f5e-b89d-488a-847d-820b59e1c53f/util/0.log" Nov 21 13:59:23 crc kubenswrapper[4721]: I1121 13:59:23.893152 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm_0fcc1f5e-b89d-488a-847d-820b59e1c53f/pull/0.log" Nov 21 13:59:24 crc kubenswrapper[4721]: I1121 13:59:24.041786 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm_0fcc1f5e-b89d-488a-847d-820b59e1c53f/util/0.log" Nov 21 13:59:24 crc kubenswrapper[4721]: I1121 13:59:24.060675 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm_0fcc1f5e-b89d-488a-847d-820b59e1c53f/extract/0.log" Nov 21 13:59:24 crc kubenswrapper[4721]: I1121 13:59:24.093753 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590lf9rm_0fcc1f5e-b89d-488a-847d-820b59e1c53f/pull/0.log" Nov 21 13:59:24 crc kubenswrapper[4721]: I1121 13:59:24.220559 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc_4334b518-e582-4308-a3e7-cbb14d281b61/util/0.log" Nov 21 13:59:24 crc kubenswrapper[4721]: I1121 13:59:24.367785 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc_4334b518-e582-4308-a3e7-cbb14d281b61/util/0.log" Nov 21 13:59:24 crc kubenswrapper[4721]: I1121 13:59:24.379694 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc_4334b518-e582-4308-a3e7-cbb14d281b61/pull/0.log" Nov 21 13:59:24 crc kubenswrapper[4721]: I1121 13:59:24.402847 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc_4334b518-e582-4308-a3e7-cbb14d281b61/pull/0.log" Nov 21 13:59:24 crc kubenswrapper[4721]: I1121 13:59:24.546753 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc_4334b518-e582-4308-a3e7-cbb14d281b61/pull/0.log" Nov 21 13:59:24 crc kubenswrapper[4721]: I1121 13:59:24.564903 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc_4334b518-e582-4308-a3e7-cbb14d281b61/extract/0.log" Nov 21 13:59:24 crc kubenswrapper[4721]: I1121 13:59:24.569464 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ab83952fd3ab379d0f5a55f30eeab7d10e010bcd327dbcc65a9d017a8enkbnc_4334b518-e582-4308-a3e7-cbb14d281b61/util/0.log" Nov 21 13:59:24 crc kubenswrapper[4721]: I1121 13:59:24.714177 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67_3db95ec9-54df-4d1b-9460-a17d596a62ad/util/0.log" Nov 21 13:59:24 crc kubenswrapper[4721]: I1121 13:59:24.892552 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67_3db95ec9-54df-4d1b-9460-a17d596a62ad/pull/0.log" Nov 21 13:59:24 crc kubenswrapper[4721]: I1121 13:59:24.908766 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67_3db95ec9-54df-4d1b-9460-a17d596a62ad/pull/0.log" Nov 21 13:59:24 crc kubenswrapper[4721]: I1121 13:59:24.917159 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67_3db95ec9-54df-4d1b-9460-a17d596a62ad/util/0.log" Nov 21 13:59:25 crc kubenswrapper[4721]: I1121 13:59:25.092410 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67_3db95ec9-54df-4d1b-9460-a17d596a62ad/pull/0.log" Nov 21 13:59:25 crc kubenswrapper[4721]: I1121 13:59:25.105967 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67_3db95ec9-54df-4d1b-9460-a17d596a62ad/extract/0.log" Nov 21 13:59:25 crc kubenswrapper[4721]: I1121 13:59:25.136356 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c0e3116367cde1c1b1dc1212fd06488092cd2756a19979d543913d662dw8s67_3db95ec9-54df-4d1b-9460-a17d596a62ad/util/0.log" Nov 21 13:59:25 crc kubenswrapper[4721]: I1121 13:59:25.264764 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc_66f9acf0-355c-4cf5-8e6a-b74cad5985f9/util/0.log" Nov 21 13:59:25 crc kubenswrapper[4721]: I1121 13:59:25.425734 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc_66f9acf0-355c-4cf5-8e6a-b74cad5985f9/pull/0.log" Nov 21 13:59:25 crc kubenswrapper[4721]: I1121 13:59:25.456022 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc_66f9acf0-355c-4cf5-8e6a-b74cad5985f9/pull/0.log" Nov 21 13:59:25 crc kubenswrapper[4721]: I1121 13:59:25.479111 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc_66f9acf0-355c-4cf5-8e6a-b74cad5985f9/util/0.log" Nov 21 13:59:25 crc kubenswrapper[4721]: I1121 13:59:25.638870 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc_66f9acf0-355c-4cf5-8e6a-b74cad5985f9/util/0.log" Nov 21 13:59:25 crc kubenswrapper[4721]: I1121 13:59:25.639421 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc_66f9acf0-355c-4cf5-8e6a-b74cad5985f9/pull/0.log" Nov 21 13:59:25 crc kubenswrapper[4721]: I1121 13:59:25.643951 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7d7e4671d7d05874436e844b5cf6d94e49a6a946d772a1ed2117581bcvtqfc_66f9acf0-355c-4cf5-8e6a-b74cad5985f9/extract/0.log" Nov 21 13:59:25 crc kubenswrapper[4721]: I1121 13:59:25.792673 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7f9c89dbc-n6lwm_ec9a4dcc-2e70-40e3-a61e-7d8fff27c092/manager/0.log" Nov 21 13:59:25 crc 
kubenswrapper[4721]: I1121 13:59:25.853516 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-index-4nt92_6b72df5a-86d9-4406-88b1-4059f7067179/registry-server/0.log" Nov 21 13:59:25 crc kubenswrapper[4721]: I1121 13:59:25.948071 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-87d79d4dc-76n7r_18246f53-8cd9-4eae-85d8-d7543af4d397/manager/1.log" Nov 21 13:59:25 crc kubenswrapper[4721]: I1121 13:59:25.995776 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-87d79d4dc-76n7r_18246f53-8cd9-4eae-85d8-d7543af4d397/kube-rbac-proxy/0.log" Nov 21 13:59:26 crc kubenswrapper[4721]: I1121 13:59:26.096804 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-87d79d4dc-76n7r_18246f53-8cd9-4eae-85d8-d7543af4d397/manager/0.log" Nov 21 13:59:26 crc kubenswrapper[4721]: I1121 13:59:26.129645 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-index-4zsjc_c90b21c7-7bc7-4c0d-9893-63f8d9d784fc/registry-server/0.log" Nov 21 13:59:26 crc kubenswrapper[4721]: I1121 13:59:26.242102 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-6c95b69c96-p8ktm_23f820aa-61b3-41c7-b322-2c99c8293ff2/kube-rbac-proxy/0.log" Nov 21 13:59:26 crc kubenswrapper[4721]: I1121 13:59:26.316872 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-6c95b69c96-p8ktm_23f820aa-61b3-41c7-b322-2c99c8293ff2/manager/0.log" Nov 21 13:59:26 crc kubenswrapper[4721]: I1121 13:59:26.356129 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-index-4pj9l_aa287363-9dcc-402b-ba8f-032187456d2c/registry-server/0.log" Nov 21 13:59:26 crc kubenswrapper[4721]: I1121 13:59:26.470670 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-d9b6869ff-qsxxl_3dbd2cda-dd31-423b-9510-c6afd1fc82f9/kube-rbac-proxy/0.log" Nov 21 13:59:26 crc kubenswrapper[4721]: I1121 13:59:26.511589 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-d9b6869ff-qsxxl_3dbd2cda-dd31-423b-9510-c6afd1fc82f9/manager/0.log" Nov 21 13:59:26 crc kubenswrapper[4721]: I1121 13:59:26.565614 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-index-g7mgz_3b5273ec-581b-4d8c-a334-d063b3bef2e7/registry-server/0.log" Nov 21 13:59:26 crc kubenswrapper[4721]: I1121 13:59:26.668713 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-779fc9694b-j7fth_c76b426d-f716-45db-bb4d-75d15156d323/operator/0.log" Nov 21 13:59:26 crc kubenswrapper[4721]: I1121 13:59:26.724444 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-index-fsccc_b7e7c40c-9247-4c76-bbf9-d6adc9849eab/registry-server/0.log" Nov 21 13:59:40 crc kubenswrapper[4721]: I1121 13:59:40.510918 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-4n9g5_407861d7-5569-41aa-bcca-0250e213d7c4/control-plane-machine-set-operator/0.log" Nov 21 13:59:40 crc kubenswrapper[4721]: I1121 13:59:40.641462 4721 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-tcjm6_a6ce2425-0836-4d18-95b7-cdf56c182b25/kube-rbac-proxy/0.log" Nov 21 13:59:40 crc kubenswrapper[4721]: I1121 13:59:40.745605 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-tcjm6_a6ce2425-0836-4d18-95b7-cdf56c182b25/machine-api-operator/0.log" Nov 21 13:59:56 crc kubenswrapper[4721]: I1121 13:59:56.030365 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6c7b4b5f48-72qg5_b56a61e0-296e-4fae-adf6-9c2c5b86444f/kube-rbac-proxy/0.log" Nov 21 13:59:56 crc kubenswrapper[4721]: I1121 13:59:56.037326 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6c7b4b5f48-72qg5_b56a61e0-296e-4fae-adf6-9c2c5b86444f/controller/0.log" Nov 21 13:59:56 crc kubenswrapper[4721]: I1121 13:59:56.237295 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sfbbq_d7e86b33-b3dd-4969-8f78-8749e9c9ef6c/cp-frr-files/0.log" Nov 21 13:59:56 crc kubenswrapper[4721]: I1121 13:59:56.417218 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sfbbq_d7e86b33-b3dd-4969-8f78-8749e9c9ef6c/cp-metrics/0.log" Nov 21 13:59:56 crc kubenswrapper[4721]: I1121 13:59:56.418423 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sfbbq_d7e86b33-b3dd-4969-8f78-8749e9c9ef6c/cp-frr-files/0.log" Nov 21 13:59:56 crc kubenswrapper[4721]: I1121 13:59:56.437535 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sfbbq_d7e86b33-b3dd-4969-8f78-8749e9c9ef6c/cp-reloader/0.log" Nov 21 13:59:56 crc kubenswrapper[4721]: I1121 13:59:56.454030 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sfbbq_d7e86b33-b3dd-4969-8f78-8749e9c9ef6c/cp-reloader/0.log" Nov 21 13:59:56 crc kubenswrapper[4721]: I1121 13:59:56.673190 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sfbbq_d7e86b33-b3dd-4969-8f78-8749e9c9ef6c/cp-frr-files/0.log" Nov 21 13:59:56 crc kubenswrapper[4721]: I1121 13:59:56.687875 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sfbbq_d7e86b33-b3dd-4969-8f78-8749e9c9ef6c/cp-metrics/0.log" Nov 21 13:59:56 crc kubenswrapper[4721]: I1121 13:59:56.688759 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sfbbq_d7e86b33-b3dd-4969-8f78-8749e9c9ef6c/cp-metrics/0.log" Nov 21 13:59:56 crc kubenswrapper[4721]: I1121 13:59:56.693725 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sfbbq_d7e86b33-b3dd-4969-8f78-8749e9c9ef6c/cp-reloader/0.log" Nov 21 13:59:56 crc kubenswrapper[4721]: I1121 13:59:56.842940 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sfbbq_d7e86b33-b3dd-4969-8f78-8749e9c9ef6c/cp-metrics/0.log" Nov 21 13:59:56 crc kubenswrapper[4721]: I1121 13:59:56.867370 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sfbbq_d7e86b33-b3dd-4969-8f78-8749e9c9ef6c/cp-frr-files/0.log" Nov 21 13:59:56 crc kubenswrapper[4721]: I1121 13:59:56.867406 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sfbbq_d7e86b33-b3dd-4969-8f78-8749e9c9ef6c/cp-reloader/0.log" Nov 21 13:59:56 crc kubenswrapper[4721]: I1121 13:59:56.918204 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-sfbbq_d7e86b33-b3dd-4969-8f78-8749e9c9ef6c/controller/0.log" Nov 21 13:59:57 crc kubenswrapper[4721]: I1121 13:59:57.098921 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sfbbq_d7e86b33-b3dd-4969-8f78-8749e9c9ef6c/frr-metrics/0.log" Nov 21 13:59:57 crc kubenswrapper[4721]: I1121 13:59:57.099617 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sfbbq_d7e86b33-b3dd-4969-8f78-8749e9c9ef6c/kube-rbac-proxy/0.log" Nov 21 13:59:57 crc kubenswrapper[4721]: I1121 13:59:57.163299 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sfbbq_d7e86b33-b3dd-4969-8f78-8749e9c9ef6c/frr/0.log" Nov 21 13:59:57 crc kubenswrapper[4721]: I1121 13:59:57.168036 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sfbbq_d7e86b33-b3dd-4969-8f78-8749e9c9ef6c/kube-rbac-proxy-frr/0.log" Nov 21 13:59:57 crc kubenswrapper[4721]: I1121 13:59:57.293880 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sfbbq_d7e86b33-b3dd-4969-8f78-8749e9c9ef6c/reloader/0.log" Nov 21 13:59:57 crc kubenswrapper[4721]: I1121 13:59:57.370187 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-6998585d5-8ft7x_dcc25858-9526-4309-a78c-4031fc67b915/frr-k8s-webhook-server/0.log" Nov 21 13:59:57 crc kubenswrapper[4721]: I1121 13:59:57.498593 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-58688b66bd-4mdqb_caad2501-e4ef-4058-9311-d2192becf981/manager/0.log" Nov 21 13:59:57 crc kubenswrapper[4721]: I1121 13:59:57.559620 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6dc6b575b-bq4b5_95f7f7f4-5a42-4fae-a1ea-249e844aec92/webhook-server/0.log" Nov 21 13:59:57 crc kubenswrapper[4721]: I1121 13:59:57.754539 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-d97pd_e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7/kube-rbac-proxy/0.log" Nov 21 13:59:57 crc kubenswrapper[4721]: I1121 13:59:57.764142 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-d97pd_e6f9b6f9-1ef4-41f2-85cc-583980cdf8a7/speaker/0.log" Nov 21 14:00:00 crc kubenswrapper[4721]: I1121 14:00:00.133248 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395560-prgz8"] Nov 21 14:00:00 crc kubenswrapper[4721]: I1121 14:00:00.134523 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-prgz8" Nov 21 14:00:00 crc kubenswrapper[4721]: I1121 14:00:00.137241 4721 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 21 14:00:00 crc kubenswrapper[4721]: I1121 14:00:00.139580 4721 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 21 14:00:00 crc kubenswrapper[4721]: I1121 14:00:00.146556 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395560-prgz8"] Nov 21 14:00:00 crc kubenswrapper[4721]: I1121 14:00:00.260390 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b9d89dfd-7411-43ef-a1bc-d66edd43837f-config-volume\") pod \"collect-profiles-29395560-prgz8\" (UID: \"b9d89dfd-7411-43ef-a1bc-d66edd43837f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-prgz8" Nov 21 14:00:00 crc kubenswrapper[4721]: I1121 14:00:00.260470 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6c7j\" (UniqueName: \"kubernetes.io/projected/b9d89dfd-7411-43ef-a1bc-d66edd43837f-kube-api-access-l6c7j\") pod \"collect-profiles-29395560-prgz8\" (UID: \"b9d89dfd-7411-43ef-a1bc-d66edd43837f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-prgz8" Nov 21 14:00:00 crc kubenswrapper[4721]: I1121 14:00:00.260496 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b9d89dfd-7411-43ef-a1bc-d66edd43837f-secret-volume\") pod \"collect-profiles-29395560-prgz8\" (UID: \"b9d89dfd-7411-43ef-a1bc-d66edd43837f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-prgz8" Nov 21 14:00:00 crc kubenswrapper[4721]: I1121 14:00:00.362153 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b9d89dfd-7411-43ef-a1bc-d66edd43837f-config-volume\") pod \"collect-profiles-29395560-prgz8\" (UID: \"b9d89dfd-7411-43ef-a1bc-d66edd43837f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-prgz8" Nov 21 14:00:00 crc kubenswrapper[4721]: I1121 14:00:00.362286 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6c7j\" (UniqueName: \"kubernetes.io/projected/b9d89dfd-7411-43ef-a1bc-d66edd43837f-kube-api-access-l6c7j\") pod \"collect-profiles-29395560-prgz8\" (UID: \"b9d89dfd-7411-43ef-a1bc-d66edd43837f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-prgz8" Nov 21 14:00:00 crc kubenswrapper[4721]: I1121 14:00:00.362315 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b9d89dfd-7411-43ef-a1bc-d66edd43837f-secret-volume\") pod \"collect-profiles-29395560-prgz8\" (UID: \"b9d89dfd-7411-43ef-a1bc-d66edd43837f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-prgz8" Nov 21 14:00:00 crc kubenswrapper[4721]: I1121 14:00:00.363542 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b9d89dfd-7411-43ef-a1bc-d66edd43837f-config-volume\") pod 
\"collect-profiles-29395560-prgz8\" (UID: \"b9d89dfd-7411-43ef-a1bc-d66edd43837f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-prgz8" Nov 21 14:00:00 crc kubenswrapper[4721]: I1121 14:00:00.374924 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b9d89dfd-7411-43ef-a1bc-d66edd43837f-secret-volume\") pod \"collect-profiles-29395560-prgz8\" (UID: \"b9d89dfd-7411-43ef-a1bc-d66edd43837f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-prgz8" Nov 21 14:00:00 crc kubenswrapper[4721]: I1121 14:00:00.392267 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6c7j\" (UniqueName: \"kubernetes.io/projected/b9d89dfd-7411-43ef-a1bc-d66edd43837f-kube-api-access-l6c7j\") pod \"collect-profiles-29395560-prgz8\" (UID: \"b9d89dfd-7411-43ef-a1bc-d66edd43837f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-prgz8" Nov 21 14:00:00 crc kubenswrapper[4721]: I1121 14:00:00.454816 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-prgz8" Nov 21 14:00:00 crc kubenswrapper[4721]: I1121 14:00:00.678662 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395560-prgz8"] Nov 21 14:00:00 crc kubenswrapper[4721]: W1121 14:00:00.687591 4721 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9d89dfd_7411_43ef_a1bc_d66edd43837f.slice/crio-6f1d09c21d64f9b0f45b018b50b3ba7706053e02df9f6e89904b2f5b75de322e WatchSource:0}: Error finding container 6f1d09c21d64f9b0f45b018b50b3ba7706053e02df9f6e89904b2f5b75de322e: Status 404 returned error can't find the container with id 6f1d09c21d64f9b0f45b018b50b3ba7706053e02df9f6e89904b2f5b75de322e Nov 21 14:00:01 crc kubenswrapper[4721]: I1121 14:00:01.349506 4721 generic.go:334] "Generic (PLEG): container finished" podID="b9d89dfd-7411-43ef-a1bc-d66edd43837f" containerID="7c70ddf095cf5f52c30bb116a888a3f5fdee31c784538946f525f7a9666825b6" exitCode=0 Nov 21 14:00:01 crc kubenswrapper[4721]: I1121 14:00:01.349571 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-prgz8" event={"ID":"b9d89dfd-7411-43ef-a1bc-d66edd43837f","Type":"ContainerDied","Data":"7c70ddf095cf5f52c30bb116a888a3f5fdee31c784538946f525f7a9666825b6"} Nov 21 14:00:01 crc kubenswrapper[4721]: I1121 14:00:01.349770 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-prgz8" event={"ID":"b9d89dfd-7411-43ef-a1bc-d66edd43837f","Type":"ContainerStarted","Data":"6f1d09c21d64f9b0f45b018b50b3ba7706053e02df9f6e89904b2f5b75de322e"} Nov 21 14:00:02 crc kubenswrapper[4721]: I1121 14:00:02.609107 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-prgz8" Nov 21 14:00:02 crc kubenswrapper[4721]: I1121 14:00:02.803700 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6c7j\" (UniqueName: \"kubernetes.io/projected/b9d89dfd-7411-43ef-a1bc-d66edd43837f-kube-api-access-l6c7j\") pod \"b9d89dfd-7411-43ef-a1bc-d66edd43837f\" (UID: \"b9d89dfd-7411-43ef-a1bc-d66edd43837f\") " Nov 21 14:00:02 crc kubenswrapper[4721]: I1121 14:00:02.803808 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b9d89dfd-7411-43ef-a1bc-d66edd43837f-secret-volume\") pod \"b9d89dfd-7411-43ef-a1bc-d66edd43837f\" (UID: \"b9d89dfd-7411-43ef-a1bc-d66edd43837f\") " Nov 21 14:00:02 crc kubenswrapper[4721]: I1121 14:00:02.803921 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b9d89dfd-7411-43ef-a1bc-d66edd43837f-config-volume\") pod \"b9d89dfd-7411-43ef-a1bc-d66edd43837f\" (UID: \"b9d89dfd-7411-43ef-a1bc-d66edd43837f\") " Nov 21 14:00:02 crc kubenswrapper[4721]: I1121 14:00:02.804678 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9d89dfd-7411-43ef-a1bc-d66edd43837f-config-volume" (OuterVolumeSpecName: "config-volume") pod "b9d89dfd-7411-43ef-a1bc-d66edd43837f" (UID: "b9d89dfd-7411-43ef-a1bc-d66edd43837f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:00:02 crc kubenswrapper[4721]: I1121 14:00:02.809681 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9d89dfd-7411-43ef-a1bc-d66edd43837f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b9d89dfd-7411-43ef-a1bc-d66edd43837f" (UID: "b9d89dfd-7411-43ef-a1bc-d66edd43837f"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:00:02 crc kubenswrapper[4721]: I1121 14:00:02.809696 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9d89dfd-7411-43ef-a1bc-d66edd43837f-kube-api-access-l6c7j" (OuterVolumeSpecName: "kube-api-access-l6c7j") pod "b9d89dfd-7411-43ef-a1bc-d66edd43837f" (UID: "b9d89dfd-7411-43ef-a1bc-d66edd43837f"). InnerVolumeSpecName "kube-api-access-l6c7j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:00:02 crc kubenswrapper[4721]: I1121 14:00:02.905728 4721 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b9d89dfd-7411-43ef-a1bc-d66edd43837f-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 21 14:00:02 crc kubenswrapper[4721]: I1121 14:00:02.905761 4721 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b9d89dfd-7411-43ef-a1bc-d66edd43837f-config-volume\") on node \"crc\" DevicePath \"\"" Nov 21 14:00:02 crc kubenswrapper[4721]: I1121 14:00:02.905770 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6c7j\" (UniqueName: \"kubernetes.io/projected/b9d89dfd-7411-43ef-a1bc-d66edd43837f-kube-api-access-l6c7j\") on node \"crc\" DevicePath \"\"" Nov 21 14:00:03 crc kubenswrapper[4721]: I1121 14:00:03.363677 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-prgz8" event={"ID":"b9d89dfd-7411-43ef-a1bc-d66edd43837f","Type":"ContainerDied","Data":"6f1d09c21d64f9b0f45b018b50b3ba7706053e02df9f6e89904b2f5b75de322e"} Nov 21 14:00:03 crc kubenswrapper[4721]: I1121 14:00:03.363969 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f1d09c21d64f9b0f45b018b50b3ba7706053e02df9f6e89904b2f5b75de322e" Nov 21 14:00:03 crc kubenswrapper[4721]: I1121 14:00:03.363722 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-prgz8" Nov 21 14:00:05 crc kubenswrapper[4721]: I1121 14:00:05.914247 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fbmnk"] Nov 21 14:00:05 crc kubenswrapper[4721]: E1121 14:00:05.914939 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9d89dfd-7411-43ef-a1bc-d66edd43837f" containerName="collect-profiles" Nov 21 14:00:05 crc kubenswrapper[4721]: I1121 14:00:05.914956 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9d89dfd-7411-43ef-a1bc-d66edd43837f" containerName="collect-profiles" Nov 21 14:00:05 crc kubenswrapper[4721]: I1121 14:00:05.915159 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9d89dfd-7411-43ef-a1bc-d66edd43837f" containerName="collect-profiles" Nov 21 14:00:05 crc kubenswrapper[4721]: I1121 14:00:05.916309 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fbmnk" Nov 21 14:00:05 crc kubenswrapper[4721]: I1121 14:00:05.926945 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fbmnk"] Nov 21 14:00:06 crc kubenswrapper[4721]: I1121 14:00:06.050036 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1-catalog-content\") pod \"certified-operators-fbmnk\" (UID: \"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1\") " pod="openshift-marketplace/certified-operators-fbmnk" Nov 21 14:00:06 crc kubenswrapper[4721]: I1121 14:00:06.050141 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1-utilities\") pod \"certified-operators-fbmnk\" (UID: \"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1\") " pod="openshift-marketplace/certified-operators-fbmnk" Nov 21 14:00:06 crc kubenswrapper[4721]: I1121 14:00:06.050183 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5v29z\" (UniqueName: \"kubernetes.io/projected/90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1-kube-api-access-5v29z\") pod \"certified-operators-fbmnk\" (UID: \"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1\") " pod="openshift-marketplace/certified-operators-fbmnk" Nov 21 14:00:06 crc kubenswrapper[4721]: I1121 14:00:06.151399 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1-catalog-content\") pod \"certified-operators-fbmnk\" (UID: \"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1\") " pod="openshift-marketplace/certified-operators-fbmnk" Nov 21 14:00:06 crc kubenswrapper[4721]: I1121 14:00:06.151534 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1-utilities\") pod \"certified-operators-fbmnk\" (UID: \"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1\") " pod="openshift-marketplace/certified-operators-fbmnk" Nov 21 14:00:06 crc kubenswrapper[4721]: I1121 14:00:06.151577 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5v29z\" (UniqueName: \"kubernetes.io/projected/90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1-kube-api-access-5v29z\") pod \"certified-operators-fbmnk\" (UID: \"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1\") " pod="openshift-marketplace/certified-operators-fbmnk" Nov 21 14:00:06 crc kubenswrapper[4721]: I1121 14:00:06.152045 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1-catalog-content\") pod \"certified-operators-fbmnk\" (UID: \"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1\") " pod="openshift-marketplace/certified-operators-fbmnk" Nov 21 14:00:06 crc kubenswrapper[4721]: I1121 14:00:06.152083 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1-utilities\") pod \"certified-operators-fbmnk\" (UID: \"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1\") " pod="openshift-marketplace/certified-operators-fbmnk" Nov 21 14:00:06 crc kubenswrapper[4721]: I1121 14:00:06.173362 4721 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-5v29z\" (UniqueName: \"kubernetes.io/projected/90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1-kube-api-access-5v29z\") pod \"certified-operators-fbmnk\" (UID: \"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1\") " pod="openshift-marketplace/certified-operators-fbmnk" Nov 21 14:00:06 crc kubenswrapper[4721]: I1121 14:00:06.235824 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fbmnk" Nov 21 14:00:06 crc kubenswrapper[4721]: I1121 14:00:06.512135 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fbmnk"] Nov 21 14:00:07 crc kubenswrapper[4721]: I1121 14:00:07.394729 4721 generic.go:334] "Generic (PLEG): container finished" podID="90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1" containerID="61979e1ddc822a53da913282c37d9e65942fe4dcb2a7c3ccb15c2e6eef95c693" exitCode=0 Nov 21 14:00:07 crc kubenswrapper[4721]: I1121 14:00:07.394785 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fbmnk" event={"ID":"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1","Type":"ContainerDied","Data":"61979e1ddc822a53da913282c37d9e65942fe4dcb2a7c3ccb15c2e6eef95c693"} Nov 21 14:00:07 crc kubenswrapper[4721]: I1121 14:00:07.395019 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fbmnk" event={"ID":"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1","Type":"ContainerStarted","Data":"d8debd2cf9ed06ac08cbd8480d33513399724b8d5c009974999d6f0b12a17197"} Nov 21 14:00:07 crc kubenswrapper[4721]: I1121 14:00:07.397857 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 14:00:09 crc kubenswrapper[4721]: I1121 14:00:09.410177 4721 generic.go:334] "Generic (PLEG): container finished" podID="90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1" containerID="c9b29085991917d49ddc9007c74eaf0c8aa1623934bbdaacf11cad1244af64ba" exitCode=0 Nov 21 14:00:09 crc kubenswrapper[4721]: I1121 14:00:09.410290 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fbmnk" event={"ID":"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1","Type":"ContainerDied","Data":"c9b29085991917d49ddc9007c74eaf0c8aa1623934bbdaacf11cad1244af64ba"} Nov 21 14:00:10 crc kubenswrapper[4721]: I1121 14:00:10.418378 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fbmnk" event={"ID":"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1","Type":"ContainerStarted","Data":"08c7c4b609f0c813cc551bbef1f732fab7af36db6c20b520d208cc7e4114ff50"} Nov 21 14:00:10 crc kubenswrapper[4721]: I1121 14:00:10.438550 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fbmnk" podStartSLOduration=2.765180252 podStartE2EDuration="5.438534266s" podCreationTimestamp="2025-11-21 14:00:05 +0000 UTC" firstStartedPulling="2025-11-21 14:00:07.397571272 +0000 UTC m=+1445.375462550" lastFinishedPulling="2025-11-21 14:00:10.070925306 +0000 UTC m=+1448.048816564" observedRunningTime="2025-11-21 14:00:10.437132198 +0000 UTC m=+1448.415023476" watchObservedRunningTime="2025-11-21 14:00:10.438534266 +0000 UTC m=+1448.416425524" Nov 21 14:00:10 crc kubenswrapper[4721]: I1121 14:00:10.582664 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/horizon-kuttl-tests_keystone-4959-account-create-update-tskfp_2e1cff88-741d-4968-a2ec-677854eebe6b/mariadb-account-create-update/0.log" Nov 21 14:00:10 crc 
kubenswrapper[4721]: I1121 14:00:10.850738 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/horizon-kuttl-tests_keystone-8fd888fc4-zgr57_2f143660-bade-4c16-ad3a-a7ab1f2c3283/keystone-api/0.log" Nov 21 14:00:10 crc kubenswrapper[4721]: I1121 14:00:10.895592 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/horizon-kuttl-tests_keystone-bootstrap-pchtq_6d09526c-0b2c-43e7-94f1-8ec539bc5f34/keystone-bootstrap/0.log" Nov 21 14:00:11 crc kubenswrapper[4721]: I1121 14:00:11.065500 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/horizon-kuttl-tests_keystone-db-create-vvl59_228ed5a3-1cc5-4176-a779-16251358296a/mariadb-database-create/0.log" Nov 21 14:00:11 crc kubenswrapper[4721]: I1121 14:00:11.093216 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/horizon-kuttl-tests_keystone-db-sync-jpwfj_cc49d3fb-6876-4d26-8e8c-f63f3d680936/keystone-db-sync/0.log" Nov 21 14:00:11 crc kubenswrapper[4721]: I1121 14:00:11.323447 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/horizon-kuttl-tests_memcached-0_ffed5669-7436-4a05-a10b-8decf1eee90c/memcached/0.log" Nov 21 14:00:11 crc kubenswrapper[4721]: I1121 14:00:11.351232 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/horizon-kuttl-tests_openstack-galera-0_d3b77c54-c410-4bfc-867b-3ba283ab4d98/mysql-bootstrap/0.log" Nov 21 14:00:11 crc kubenswrapper[4721]: I1121 14:00:11.612107 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/horizon-kuttl-tests_openstack-galera-1_75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1/mysql-bootstrap/0.log" Nov 21 14:00:11 crc kubenswrapper[4721]: I1121 14:00:11.633202 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/horizon-kuttl-tests_openstack-galera-0_d3b77c54-c410-4bfc-867b-3ba283ab4d98/galera/0.log" Nov 21 14:00:11 crc kubenswrapper[4721]: I1121 14:00:11.677134 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/horizon-kuttl-tests_openstack-galera-0_d3b77c54-c410-4bfc-867b-3ba283ab4d98/mysql-bootstrap/0.log" Nov 21 14:00:11 crc kubenswrapper[4721]: I1121 14:00:11.781147 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/horizon-kuttl-tests_openstack-galera-1_75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1/mysql-bootstrap/0.log" Nov 21 14:00:11 crc kubenswrapper[4721]: I1121 14:00:11.833713 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/horizon-kuttl-tests_openstack-galera-1_75f7339d-3bf8-4bea-a3ec-31f3de2bcbf1/galera/0.log" Nov 21 14:00:11 crc kubenswrapper[4721]: I1121 14:00:11.890693 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/horizon-kuttl-tests_openstack-galera-2_29905f76-0ed3-4b86-b2a8-5b9da17c225a/mysql-bootstrap/0.log" Nov 21 14:00:12 crc kubenswrapper[4721]: I1121 14:00:12.046444 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/horizon-kuttl-tests_openstack-galera-2_29905f76-0ed3-4b86-b2a8-5b9da17c225a/mysql-bootstrap/0.log" Nov 21 14:00:12 crc kubenswrapper[4721]: I1121 14:00:12.079870 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/horizon-kuttl-tests_openstack-galera-2_29905f76-0ed3-4b86-b2a8-5b9da17c225a/galera/0.log" Nov 21 14:00:12 crc kubenswrapper[4721]: I1121 14:00:12.126522 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/horizon-kuttl-tests_rabbitmq-server-0_73ce16fc-de75-4ab2-a732-36a9b1c14c3a/setup-container/0.log" Nov 21 14:00:12 crc kubenswrapper[4721]: I1121 14:00:12.276252 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/horizon-kuttl-tests_rabbitmq-server-0_73ce16fc-de75-4ab2-a732-36a9b1c14c3a/setup-container/0.log" Nov 21 14:00:12 crc kubenswrapper[4721]: I1121 14:00:12.287431 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/horizon-kuttl-tests_rabbitmq-server-0_73ce16fc-de75-4ab2-a732-36a9b1c14c3a/rabbitmq/0.log" Nov 21 14:00:16 crc kubenswrapper[4721]: I1121 14:00:16.236685 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fbmnk" Nov 21 14:00:16 crc kubenswrapper[4721]: I1121 14:00:16.237282 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fbmnk" Nov 21 14:00:16 crc kubenswrapper[4721]: I1121 14:00:16.278152 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fbmnk" Nov 21 14:00:16 crc kubenswrapper[4721]: I1121 14:00:16.506394 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fbmnk" Nov 21 14:00:16 crc kubenswrapper[4721]: I1121 14:00:16.551815 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fbmnk"] Nov 21 14:00:18 crc kubenswrapper[4721]: I1121 14:00:18.472144 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fbmnk" podUID="90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1" containerName="registry-server" containerID="cri-o://08c7c4b609f0c813cc551bbef1f732fab7af36db6c20b520d208cc7e4114ff50" gracePeriod=2 Nov 21 14:00:20 crc kubenswrapper[4721]: I1121 14:00:20.488467 4721 generic.go:334] "Generic (PLEG): container finished" podID="90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1" containerID="08c7c4b609f0c813cc551bbef1f732fab7af36db6c20b520d208cc7e4114ff50" exitCode=0 Nov 21 14:00:20 crc kubenswrapper[4721]: I1121 14:00:20.488552 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fbmnk" event={"ID":"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1","Type":"ContainerDied","Data":"08c7c4b609f0c813cc551bbef1f732fab7af36db6c20b520d208cc7e4114ff50"} Nov 21 14:00:20 crc kubenswrapper[4721]: I1121 14:00:20.803866 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fbmnk" Nov 21 14:00:20 crc kubenswrapper[4721]: I1121 14:00:20.972642 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1-catalog-content\") pod \"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1\" (UID: \"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1\") " Nov 21 14:00:20 crc kubenswrapper[4721]: I1121 14:00:20.972729 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1-utilities\") pod \"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1\" (UID: \"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1\") " Nov 21 14:00:20 crc kubenswrapper[4721]: I1121 14:00:20.972776 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5v29z\" (UniqueName: \"kubernetes.io/projected/90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1-kube-api-access-5v29z\") pod \"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1\" (UID: \"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1\") " Nov 21 14:00:20 crc kubenswrapper[4721]: I1121 14:00:20.973669 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1-utilities" (OuterVolumeSpecName: "utilities") pod "90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1" (UID: "90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:00:20 crc kubenswrapper[4721]: I1121 14:00:20.977949 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1-kube-api-access-5v29z" (OuterVolumeSpecName: "kube-api-access-5v29z") pod "90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1" (UID: "90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1"). InnerVolumeSpecName "kube-api-access-5v29z". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:00:21 crc kubenswrapper[4721]: I1121 14:00:21.074736 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:00:21 crc kubenswrapper[4721]: I1121 14:00:21.074997 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5v29z\" (UniqueName: \"kubernetes.io/projected/90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1-kube-api-access-5v29z\") on node \"crc\" DevicePath \"\"" Nov 21 14:00:21 crc kubenswrapper[4721]: I1121 14:00:21.231803 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1" (UID: "90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:00:21 crc kubenswrapper[4721]: I1121 14:00:21.277895 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:00:21 crc kubenswrapper[4721]: I1121 14:00:21.498448 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fbmnk" event={"ID":"90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1","Type":"ContainerDied","Data":"d8debd2cf9ed06ac08cbd8480d33513399724b8d5c009974999d6f0b12a17197"} Nov 21 14:00:21 crc kubenswrapper[4721]: I1121 14:00:21.498511 4721 scope.go:117] "RemoveContainer" containerID="08c7c4b609f0c813cc551bbef1f732fab7af36db6c20b520d208cc7e4114ff50" Nov 21 14:00:21 crc kubenswrapper[4721]: I1121 14:00:21.498517 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fbmnk" Nov 21 14:00:21 crc kubenswrapper[4721]: I1121 14:00:21.514728 4721 scope.go:117] "RemoveContainer" containerID="c9b29085991917d49ddc9007c74eaf0c8aa1623934bbdaacf11cad1244af64ba" Nov 21 14:00:21 crc kubenswrapper[4721]: I1121 14:00:21.529794 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fbmnk"] Nov 21 14:00:21 crc kubenswrapper[4721]: I1121 14:00:21.534880 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fbmnk"] Nov 21 14:00:21 crc kubenswrapper[4721]: I1121 14:00:21.549266 4721 scope.go:117] "RemoveContainer" containerID="61979e1ddc822a53da913282c37d9e65942fe4dcb2a7c3ccb15c2e6eef95c693" Nov 21 14:00:22 crc kubenswrapper[4721]: I1121 14:00:22.990481 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1" path="/var/lib/kubelet/pods/90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1/volumes" Nov 21 14:00:24 crc kubenswrapper[4721]: I1121 14:00:24.173217 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ds4sj_3197bb81-e64b-4ece-811a-0deb7c9cdaeb/extract-utilities/0.log" Nov 21 14:00:24 crc kubenswrapper[4721]: I1121 14:00:24.351003 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ds4sj_3197bb81-e64b-4ece-811a-0deb7c9cdaeb/extract-utilities/0.log" Nov 21 14:00:24 crc kubenswrapper[4721]: I1121 14:00:24.359973 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ds4sj_3197bb81-e64b-4ece-811a-0deb7c9cdaeb/extract-content/0.log" Nov 21 14:00:24 crc kubenswrapper[4721]: I1121 14:00:24.413933 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ds4sj_3197bb81-e64b-4ece-811a-0deb7c9cdaeb/extract-content/0.log" Nov 21 14:00:24 crc kubenswrapper[4721]: I1121 14:00:24.518755 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ds4sj_3197bb81-e64b-4ece-811a-0deb7c9cdaeb/extract-utilities/0.log" Nov 21 14:00:24 crc kubenswrapper[4721]: I1121 14:00:24.527034 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ds4sj_3197bb81-e64b-4ece-811a-0deb7c9cdaeb/extract-content/0.log" Nov 21 14:00:24 crc kubenswrapper[4721]: I1121 14:00:24.814465 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-phxzd_273316c7-1207-4f09-94c4-ed63761df558/extract-utilities/0.log" Nov 21 14:00:24 crc kubenswrapper[4721]: I1121 14:00:24.881339 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ds4sj_3197bb81-e64b-4ece-811a-0deb7c9cdaeb/registry-server/0.log" Nov 21 14:00:24 crc kubenswrapper[4721]: I1121 14:00:24.887268 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-phxzd_273316c7-1207-4f09-94c4-ed63761df558/extract-utilities/0.log" Nov 21 14:00:24 crc kubenswrapper[4721]: I1121 14:00:24.915280 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-phxzd_273316c7-1207-4f09-94c4-ed63761df558/extract-content/0.log" Nov 21 14:00:25 crc kubenswrapper[4721]: I1121 14:00:25.008895 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-phxzd_273316c7-1207-4f09-94c4-ed63761df558/extract-content/0.log" Nov 21 14:00:25 crc kubenswrapper[4721]: I1121 14:00:25.185363 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-phxzd_273316c7-1207-4f09-94c4-ed63761df558/extract-content/0.log" Nov 21 14:00:25 crc kubenswrapper[4721]: I1121 14:00:25.232481 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-phxzd_273316c7-1207-4f09-94c4-ed63761df558/extract-utilities/0.log" Nov 21 14:00:25 crc kubenswrapper[4721]: I1121 14:00:25.437382 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5_64eec2c9-7144-45ac-b7aa-b0fe255f45b4/util/0.log" Nov 21 14:00:25 crc kubenswrapper[4721]: I1121 14:00:25.456958 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-phxzd_273316c7-1207-4f09-94c4-ed63761df558/registry-server/0.log" Nov 21 14:00:25 crc kubenswrapper[4721]: I1121 14:00:25.627058 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5_64eec2c9-7144-45ac-b7aa-b0fe255f45b4/pull/0.log" Nov 21 14:00:25 crc kubenswrapper[4721]: I1121 14:00:25.648411 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5_64eec2c9-7144-45ac-b7aa-b0fe255f45b4/pull/0.log" Nov 21 14:00:25 crc kubenswrapper[4721]: I1121 14:00:25.674820 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5_64eec2c9-7144-45ac-b7aa-b0fe255f45b4/util/0.log" Nov 21 14:00:25 crc kubenswrapper[4721]: I1121 14:00:25.814012 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5_64eec2c9-7144-45ac-b7aa-b0fe255f45b4/extract/0.log" Nov 21 14:00:25 crc kubenswrapper[4721]: I1121 14:00:25.820371 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5_64eec2c9-7144-45ac-b7aa-b0fe255f45b4/util/0.log" Nov 21 14:00:25 crc kubenswrapper[4721]: I1121 14:00:25.835254 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6sfjr5_64eec2c9-7144-45ac-b7aa-b0fe255f45b4/pull/0.log" Nov 21 14:00:26 crc kubenswrapper[4721]: I1121 14:00:26.006759 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-pzht5_b7ea6e0d-1ecd-461e-a02d-f98f78bff0fa/marketplace-operator/0.log" Nov 21 14:00:26 crc kubenswrapper[4721]: I1121 14:00:26.052319 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5fg2s_5acaaf53-2bc9-410f-8e00-66627b51e780/extract-utilities/0.log" Nov 21 14:00:26 crc kubenswrapper[4721]: I1121 14:00:26.389734 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5fg2s_5acaaf53-2bc9-410f-8e00-66627b51e780/extract-utilities/0.log" Nov 21 14:00:26 crc kubenswrapper[4721]: I1121 14:00:26.401476 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5fg2s_5acaaf53-2bc9-410f-8e00-66627b51e780/extract-content/0.log" Nov 21 14:00:26 crc kubenswrapper[4721]: I1121 14:00:26.457294 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5fg2s_5acaaf53-2bc9-410f-8e00-66627b51e780/extract-content/0.log" Nov 21 14:00:26 crc kubenswrapper[4721]: I1121 14:00:26.628607 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5fg2s_5acaaf53-2bc9-410f-8e00-66627b51e780/extract-utilities/0.log" Nov 21 14:00:26 crc kubenswrapper[4721]: I1121 14:00:26.637659 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5fg2s_5acaaf53-2bc9-410f-8e00-66627b51e780/extract-content/0.log" Nov 21 14:00:26 crc kubenswrapper[4721]: I1121 14:00:26.676875 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5fg2s_5acaaf53-2bc9-410f-8e00-66627b51e780/registry-server/0.log" Nov 21 14:00:26 crc kubenswrapper[4721]: I1121 14:00:26.807931 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-htjrp_4617f0e6-c704-425f-836b-d0a713769a2a/extract-utilities/0.log" Nov 21 14:00:26 crc kubenswrapper[4721]: I1121 14:00:26.953626 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-htjrp_4617f0e6-c704-425f-836b-d0a713769a2a/extract-utilities/0.log" Nov 21 14:00:26 crc kubenswrapper[4721]: I1121 14:00:26.971653 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-htjrp_4617f0e6-c704-425f-836b-d0a713769a2a/extract-content/0.log" Nov 21 14:00:27 crc kubenswrapper[4721]: I1121 14:00:27.027553 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-htjrp_4617f0e6-c704-425f-836b-d0a713769a2a/extract-content/0.log" Nov 21 14:00:27 crc kubenswrapper[4721]: I1121 14:00:27.137812 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-htjrp_4617f0e6-c704-425f-836b-d0a713769a2a/extract-content/0.log" Nov 21 14:00:27 crc kubenswrapper[4721]: I1121 14:00:27.148755 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-htjrp_4617f0e6-c704-425f-836b-d0a713769a2a/extract-utilities/0.log" Nov 21 14:00:27 crc kubenswrapper[4721]: I1121 14:00:27.486112 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-htjrp_4617f0e6-c704-425f-836b-d0a713769a2a/registry-server/0.log" Nov 21 14:00:54 crc kubenswrapper[4721]: I1121 14:00:54.579088 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-784sn"] Nov 21 14:00:54 crc kubenswrapper[4721]: E1121 14:00:54.579889 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1" containerName="registry-server" Nov 21 14:00:54 crc kubenswrapper[4721]: I1121 14:00:54.579903 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1" containerName="registry-server" Nov 21 14:00:54 crc kubenswrapper[4721]: E1121 14:00:54.579919 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1" containerName="extract-utilities" Nov 21 14:00:54 crc kubenswrapper[4721]: I1121 14:00:54.579924 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1" containerName="extract-utilities" Nov 21 14:00:54 crc kubenswrapper[4721]: E1121 14:00:54.579933 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1" containerName="extract-content" Nov 21 14:00:54 crc kubenswrapper[4721]: I1121 14:00:54.579939 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1" containerName="extract-content" Nov 21 14:00:54 crc kubenswrapper[4721]: I1121 14:00:54.580049 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="90bfc0ed-7edd-4b38-a96b-3fcee3bc40a1" containerName="registry-server" Nov 21 14:00:54 crc kubenswrapper[4721]: I1121 14:00:54.583254 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-784sn" Nov 21 14:00:54 crc kubenswrapper[4721]: I1121 14:00:54.604091 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-784sn"] Nov 21 14:00:54 crc kubenswrapper[4721]: I1121 14:00:54.677567 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43c83dc3-937a-40b3-b4dc-b6c141021001-utilities\") pod \"redhat-marketplace-784sn\" (UID: \"43c83dc3-937a-40b3-b4dc-b6c141021001\") " pod="openshift-marketplace/redhat-marketplace-784sn" Nov 21 14:00:54 crc kubenswrapper[4721]: I1121 14:00:54.677938 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43c83dc3-937a-40b3-b4dc-b6c141021001-catalog-content\") pod \"redhat-marketplace-784sn\" (UID: \"43c83dc3-937a-40b3-b4dc-b6c141021001\") " pod="openshift-marketplace/redhat-marketplace-784sn" Nov 21 14:00:54 crc kubenswrapper[4721]: I1121 14:00:54.677988 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44xpq\" (UniqueName: \"kubernetes.io/projected/43c83dc3-937a-40b3-b4dc-b6c141021001-kube-api-access-44xpq\") pod \"redhat-marketplace-784sn\" (UID: \"43c83dc3-937a-40b3-b4dc-b6c141021001\") " pod="openshift-marketplace/redhat-marketplace-784sn" Nov 21 14:00:54 crc kubenswrapper[4721]: I1121 14:00:54.779077 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43c83dc3-937a-40b3-b4dc-b6c141021001-utilities\") pod \"redhat-marketplace-784sn\" (UID: \"43c83dc3-937a-40b3-b4dc-b6c141021001\") " pod="openshift-marketplace/redhat-marketplace-784sn" Nov 21 14:00:54 crc kubenswrapper[4721]: I1121 14:00:54.779134 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43c83dc3-937a-40b3-b4dc-b6c141021001-catalog-content\") pod \"redhat-marketplace-784sn\" (UID: \"43c83dc3-937a-40b3-b4dc-b6c141021001\") " pod="openshift-marketplace/redhat-marketplace-784sn" Nov 21 14:00:54 crc kubenswrapper[4721]: I1121 14:00:54.779170 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44xpq\" (UniqueName: \"kubernetes.io/projected/43c83dc3-937a-40b3-b4dc-b6c141021001-kube-api-access-44xpq\") pod \"redhat-marketplace-784sn\" (UID: \"43c83dc3-937a-40b3-b4dc-b6c141021001\") " pod="openshift-marketplace/redhat-marketplace-784sn" Nov 21 14:00:54 crc kubenswrapper[4721]: I1121 14:00:54.779693 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43c83dc3-937a-40b3-b4dc-b6c141021001-utilities\") pod \"redhat-marketplace-784sn\" (UID: \"43c83dc3-937a-40b3-b4dc-b6c141021001\") " pod="openshift-marketplace/redhat-marketplace-784sn" Nov 21 14:00:54 crc kubenswrapper[4721]: I1121 14:00:54.779933 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43c83dc3-937a-40b3-b4dc-b6c141021001-catalog-content\") pod \"redhat-marketplace-784sn\" (UID: \"43c83dc3-937a-40b3-b4dc-b6c141021001\") " pod="openshift-marketplace/redhat-marketplace-784sn" Nov 21 14:00:54 crc kubenswrapper[4721]: I1121 14:00:54.798647 4721 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-44xpq\" (UniqueName: \"kubernetes.io/projected/43c83dc3-937a-40b3-b4dc-b6c141021001-kube-api-access-44xpq\") pod \"redhat-marketplace-784sn\" (UID: \"43c83dc3-937a-40b3-b4dc-b6c141021001\") " pod="openshift-marketplace/redhat-marketplace-784sn" Nov 21 14:00:55 crc kubenswrapper[4721]: I1121 14:00:55.122283 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-784sn" Nov 21 14:00:55 crc kubenswrapper[4721]: I1121 14:00:55.438375 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-784sn"] Nov 21 14:00:55 crc kubenswrapper[4721]: I1121 14:00:55.755185 4721 generic.go:334] "Generic (PLEG): container finished" podID="43c83dc3-937a-40b3-b4dc-b6c141021001" containerID="31da193939937244b1745b39f0045307393e53674997221977290143f5cdb501" exitCode=0 Nov 21 14:00:55 crc kubenswrapper[4721]: I1121 14:00:55.755227 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-784sn" event={"ID":"43c83dc3-937a-40b3-b4dc-b6c141021001","Type":"ContainerDied","Data":"31da193939937244b1745b39f0045307393e53674997221977290143f5cdb501"} Nov 21 14:00:55 crc kubenswrapper[4721]: I1121 14:00:55.755251 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-784sn" event={"ID":"43c83dc3-937a-40b3-b4dc-b6c141021001","Type":"ContainerStarted","Data":"f06b4e15a885551dc213964ae3ff7dd5c04271911f85ea77585b3b8934009d2c"} Nov 21 14:00:56 crc kubenswrapper[4721]: I1121 14:00:56.762753 4721 generic.go:334] "Generic (PLEG): container finished" podID="43c83dc3-937a-40b3-b4dc-b6c141021001" containerID="97eaa8fdd4adf23f1f946a92c02d3624ab1cd096bb719d0d8ec5dc19e719383f" exitCode=0 Nov 21 14:00:56 crc kubenswrapper[4721]: I1121 14:00:56.762834 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-784sn" event={"ID":"43c83dc3-937a-40b3-b4dc-b6c141021001","Type":"ContainerDied","Data":"97eaa8fdd4adf23f1f946a92c02d3624ab1cd096bb719d0d8ec5dc19e719383f"} Nov 21 14:00:57 crc kubenswrapper[4721]: I1121 14:00:57.771548 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-784sn" event={"ID":"43c83dc3-937a-40b3-b4dc-b6c141021001","Type":"ContainerStarted","Data":"901f9efbc06176cb8981df88fab0b836ebfa17c4fc294972e7d712c70eaf1b3a"} Nov 21 14:00:57 crc kubenswrapper[4721]: I1121 14:00:57.787917 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-784sn" podStartSLOduration=2.294970281 podStartE2EDuration="3.787900675s" podCreationTimestamp="2025-11-21 14:00:54 +0000 UTC" firstStartedPulling="2025-11-21 14:00:55.757194006 +0000 UTC m=+1493.735085264" lastFinishedPulling="2025-11-21 14:00:57.2501244 +0000 UTC m=+1495.228015658" observedRunningTime="2025-11-21 14:00:57.785671525 +0000 UTC m=+1495.763562803" watchObservedRunningTime="2025-11-21 14:00:57.787900675 +0000 UTC m=+1495.765791943" Nov 21 14:01:00 crc kubenswrapper[4721]: I1121 14:01:00.165510 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/keystone-cron-29395561-x4cdw"] Nov 21 14:01:00 crc kubenswrapper[4721]: I1121 14:01:00.166597 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-cron-29395561-x4cdw" Nov 21 14:01:00 crc kubenswrapper[4721]: I1121 14:01:00.173997 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-cron-29395561-x4cdw"] Nov 21 14:01:00 crc kubenswrapper[4721]: I1121 14:01:00.301612 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tn7wq\" (UniqueName: \"kubernetes.io/projected/8a7085c1-72d8-4d94-ae0b-269f3b76a006-kube-api-access-tn7wq\") pod \"keystone-cron-29395561-x4cdw\" (UID: \"8a7085c1-72d8-4d94-ae0b-269f3b76a006\") " pod="horizon-kuttl-tests/keystone-cron-29395561-x4cdw" Nov 21 14:01:00 crc kubenswrapper[4721]: I1121 14:01:00.301726 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8a7085c1-72d8-4d94-ae0b-269f3b76a006-fernet-keys\") pod \"keystone-cron-29395561-x4cdw\" (UID: \"8a7085c1-72d8-4d94-ae0b-269f3b76a006\") " pod="horizon-kuttl-tests/keystone-cron-29395561-x4cdw" Nov 21 14:01:00 crc kubenswrapper[4721]: I1121 14:01:00.301828 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a7085c1-72d8-4d94-ae0b-269f3b76a006-config-data\") pod \"keystone-cron-29395561-x4cdw\" (UID: \"8a7085c1-72d8-4d94-ae0b-269f3b76a006\") " pod="horizon-kuttl-tests/keystone-cron-29395561-x4cdw" Nov 21 14:01:00 crc kubenswrapper[4721]: I1121 14:01:00.403412 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a7085c1-72d8-4d94-ae0b-269f3b76a006-config-data\") pod \"keystone-cron-29395561-x4cdw\" (UID: \"8a7085c1-72d8-4d94-ae0b-269f3b76a006\") " pod="horizon-kuttl-tests/keystone-cron-29395561-x4cdw" Nov 21 14:01:00 crc kubenswrapper[4721]: I1121 14:01:00.403488 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tn7wq\" (UniqueName: \"kubernetes.io/projected/8a7085c1-72d8-4d94-ae0b-269f3b76a006-kube-api-access-tn7wq\") pod \"keystone-cron-29395561-x4cdw\" (UID: \"8a7085c1-72d8-4d94-ae0b-269f3b76a006\") " pod="horizon-kuttl-tests/keystone-cron-29395561-x4cdw" Nov 21 14:01:00 crc kubenswrapper[4721]: I1121 14:01:00.403534 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8a7085c1-72d8-4d94-ae0b-269f3b76a006-fernet-keys\") pod \"keystone-cron-29395561-x4cdw\" (UID: \"8a7085c1-72d8-4d94-ae0b-269f3b76a006\") " pod="horizon-kuttl-tests/keystone-cron-29395561-x4cdw" Nov 21 14:01:00 crc kubenswrapper[4721]: I1121 14:01:00.411790 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a7085c1-72d8-4d94-ae0b-269f3b76a006-config-data\") pod \"keystone-cron-29395561-x4cdw\" (UID: \"8a7085c1-72d8-4d94-ae0b-269f3b76a006\") " pod="horizon-kuttl-tests/keystone-cron-29395561-x4cdw" Nov 21 14:01:00 crc kubenswrapper[4721]: I1121 14:01:00.418627 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8a7085c1-72d8-4d94-ae0b-269f3b76a006-fernet-keys\") pod \"keystone-cron-29395561-x4cdw\" (UID: \"8a7085c1-72d8-4d94-ae0b-269f3b76a006\") " pod="horizon-kuttl-tests/keystone-cron-29395561-x4cdw" Nov 21 14:01:00 crc kubenswrapper[4721]: I1121 14:01:00.431738 4721 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tn7wq\" (UniqueName: \"kubernetes.io/projected/8a7085c1-72d8-4d94-ae0b-269f3b76a006-kube-api-access-tn7wq\") pod \"keystone-cron-29395561-x4cdw\" (UID: \"8a7085c1-72d8-4d94-ae0b-269f3b76a006\") " pod="horizon-kuttl-tests/keystone-cron-29395561-x4cdw" Nov 21 14:01:00 crc kubenswrapper[4721]: I1121 14:01:00.490494 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-cron-29395561-x4cdw" Nov 21 14:01:00 crc kubenswrapper[4721]: I1121 14:01:00.945505 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-cron-29395561-x4cdw"] Nov 21 14:01:01 crc kubenswrapper[4721]: I1121 14:01:01.799709 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-cron-29395561-x4cdw" event={"ID":"8a7085c1-72d8-4d94-ae0b-269f3b76a006","Type":"ContainerStarted","Data":"34a3309c917dc0b1be4ffdf850f4bbe60b1a7286807bd9e8de75f93f93c381fc"} Nov 21 14:01:01 crc kubenswrapper[4721]: I1121 14:01:01.800105 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-cron-29395561-x4cdw" event={"ID":"8a7085c1-72d8-4d94-ae0b-269f3b76a006","Type":"ContainerStarted","Data":"a17953c264a8a3759ab85286e1079af3f8d0414f7c72ec25dfcca6ab1ce4524f"} Nov 21 14:01:03 crc kubenswrapper[4721]: I1121 14:01:03.814581 4721 generic.go:334] "Generic (PLEG): container finished" podID="8a7085c1-72d8-4d94-ae0b-269f3b76a006" containerID="34a3309c917dc0b1be4ffdf850f4bbe60b1a7286807bd9e8de75f93f93c381fc" exitCode=0 Nov 21 14:01:03 crc kubenswrapper[4721]: I1121 14:01:03.814659 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-cron-29395561-x4cdw" event={"ID":"8a7085c1-72d8-4d94-ae0b-269f3b76a006","Type":"ContainerDied","Data":"34a3309c917dc0b1be4ffdf850f4bbe60b1a7286807bd9e8de75f93f93c381fc"} Nov 21 14:01:05 crc kubenswrapper[4721]: I1121 14:01:05.087669 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-cron-29395561-x4cdw" Nov 21 14:01:05 crc kubenswrapper[4721]: I1121 14:01:05.127389 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-784sn" Nov 21 14:01:05 crc kubenswrapper[4721]: I1121 14:01:05.127438 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-784sn" Nov 21 14:01:05 crc kubenswrapper[4721]: I1121 14:01:05.166676 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-784sn" Nov 21 14:01:05 crc kubenswrapper[4721]: I1121 14:01:05.262546 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:01:05 crc kubenswrapper[4721]: I1121 14:01:05.262611 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:01:05 crc kubenswrapper[4721]: I1121 14:01:05.273465 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8a7085c1-72d8-4d94-ae0b-269f3b76a006-fernet-keys\") pod \"8a7085c1-72d8-4d94-ae0b-269f3b76a006\" (UID: \"8a7085c1-72d8-4d94-ae0b-269f3b76a006\") " Nov 21 14:01:05 crc kubenswrapper[4721]: I1121 14:01:05.273527 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tn7wq\" (UniqueName: \"kubernetes.io/projected/8a7085c1-72d8-4d94-ae0b-269f3b76a006-kube-api-access-tn7wq\") pod \"8a7085c1-72d8-4d94-ae0b-269f3b76a006\" (UID: \"8a7085c1-72d8-4d94-ae0b-269f3b76a006\") " Nov 21 14:01:05 crc kubenswrapper[4721]: I1121 14:01:05.273560 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a7085c1-72d8-4d94-ae0b-269f3b76a006-config-data\") pod \"8a7085c1-72d8-4d94-ae0b-269f3b76a006\" (UID: \"8a7085c1-72d8-4d94-ae0b-269f3b76a006\") " Nov 21 14:01:05 crc kubenswrapper[4721]: I1121 14:01:05.279745 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a7085c1-72d8-4d94-ae0b-269f3b76a006-kube-api-access-tn7wq" (OuterVolumeSpecName: "kube-api-access-tn7wq") pod "8a7085c1-72d8-4d94-ae0b-269f3b76a006" (UID: "8a7085c1-72d8-4d94-ae0b-269f3b76a006"). InnerVolumeSpecName "kube-api-access-tn7wq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:01:05 crc kubenswrapper[4721]: I1121 14:01:05.291172 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a7085c1-72d8-4d94-ae0b-269f3b76a006-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "8a7085c1-72d8-4d94-ae0b-269f3b76a006" (UID: "8a7085c1-72d8-4d94-ae0b-269f3b76a006"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:01:05 crc kubenswrapper[4721]: I1121 14:01:05.324906 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a7085c1-72d8-4d94-ae0b-269f3b76a006-config-data" (OuterVolumeSpecName: "config-data") pod "8a7085c1-72d8-4d94-ae0b-269f3b76a006" (UID: "8a7085c1-72d8-4d94-ae0b-269f3b76a006"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:01:05 crc kubenswrapper[4721]: I1121 14:01:05.375370 4721 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8a7085c1-72d8-4d94-ae0b-269f3b76a006-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 21 14:01:05 crc kubenswrapper[4721]: I1121 14:01:05.375420 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tn7wq\" (UniqueName: \"kubernetes.io/projected/8a7085c1-72d8-4d94-ae0b-269f3b76a006-kube-api-access-tn7wq\") on node \"crc\" DevicePath \"\"" Nov 21 14:01:05 crc kubenswrapper[4721]: I1121 14:01:05.375436 4721 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a7085c1-72d8-4d94-ae0b-269f3b76a006-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:01:05 crc kubenswrapper[4721]: I1121 14:01:05.833077 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-cron-29395561-x4cdw" event={"ID":"8a7085c1-72d8-4d94-ae0b-269f3b76a006","Type":"ContainerDied","Data":"a17953c264a8a3759ab85286e1079af3f8d0414f7c72ec25dfcca6ab1ce4524f"} Nov 21 14:01:05 crc kubenswrapper[4721]: I1121 14:01:05.833121 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-cron-29395561-x4cdw" Nov 21 14:01:05 crc kubenswrapper[4721]: I1121 14:01:05.833123 4721 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a17953c264a8a3759ab85286e1079af3f8d0414f7c72ec25dfcca6ab1ce4524f" Nov 21 14:01:05 crc kubenswrapper[4721]: I1121 14:01:05.899233 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-784sn" Nov 21 14:01:05 crc kubenswrapper[4721]: I1121 14:01:05.954109 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-784sn"] Nov 21 14:01:07 crc kubenswrapper[4721]: I1121 14:01:07.848794 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-784sn" podUID="43c83dc3-937a-40b3-b4dc-b6c141021001" containerName="registry-server" containerID="cri-o://901f9efbc06176cb8981df88fab0b836ebfa17c4fc294972e7d712c70eaf1b3a" gracePeriod=2 Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.251566 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-784sn" Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.323669 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43c83dc3-937a-40b3-b4dc-b6c141021001-catalog-content\") pod \"43c83dc3-937a-40b3-b4dc-b6c141021001\" (UID: \"43c83dc3-937a-40b3-b4dc-b6c141021001\") " Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.323819 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44xpq\" (UniqueName: \"kubernetes.io/projected/43c83dc3-937a-40b3-b4dc-b6c141021001-kube-api-access-44xpq\") pod \"43c83dc3-937a-40b3-b4dc-b6c141021001\" (UID: \"43c83dc3-937a-40b3-b4dc-b6c141021001\") " Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.323876 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43c83dc3-937a-40b3-b4dc-b6c141021001-utilities\") pod \"43c83dc3-937a-40b3-b4dc-b6c141021001\" (UID: \"43c83dc3-937a-40b3-b4dc-b6c141021001\") " Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.324941 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43c83dc3-937a-40b3-b4dc-b6c141021001-utilities" (OuterVolumeSpecName: "utilities") pod "43c83dc3-937a-40b3-b4dc-b6c141021001" (UID: "43c83dc3-937a-40b3-b4dc-b6c141021001"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.329580 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43c83dc3-937a-40b3-b4dc-b6c141021001-kube-api-access-44xpq" (OuterVolumeSpecName: "kube-api-access-44xpq") pod "43c83dc3-937a-40b3-b4dc-b6c141021001" (UID: "43c83dc3-937a-40b3-b4dc-b6c141021001"). InnerVolumeSpecName "kube-api-access-44xpq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.341397 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43c83dc3-937a-40b3-b4dc-b6c141021001-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "43c83dc3-937a-40b3-b4dc-b6c141021001" (UID: "43c83dc3-937a-40b3-b4dc-b6c141021001"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.425705 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43c83dc3-937a-40b3-b4dc-b6c141021001-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.425751 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44xpq\" (UniqueName: \"kubernetes.io/projected/43c83dc3-937a-40b3-b4dc-b6c141021001-kube-api-access-44xpq\") on node \"crc\" DevicePath \"\"" Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.425782 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43c83dc3-937a-40b3-b4dc-b6c141021001-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.859412 4721 generic.go:334] "Generic (PLEG): container finished" podID="43c83dc3-937a-40b3-b4dc-b6c141021001" containerID="901f9efbc06176cb8981df88fab0b836ebfa17c4fc294972e7d712c70eaf1b3a" exitCode=0 Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.859466 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-784sn" event={"ID":"43c83dc3-937a-40b3-b4dc-b6c141021001","Type":"ContainerDied","Data":"901f9efbc06176cb8981df88fab0b836ebfa17c4fc294972e7d712c70eaf1b3a"} Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.859498 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-784sn" event={"ID":"43c83dc3-937a-40b3-b4dc-b6c141021001","Type":"ContainerDied","Data":"f06b4e15a885551dc213964ae3ff7dd5c04271911f85ea77585b3b8934009d2c"} Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.859518 4721 scope.go:117] "RemoveContainer" containerID="901f9efbc06176cb8981df88fab0b836ebfa17c4fc294972e7d712c70eaf1b3a" Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.859668 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-784sn" Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.901778 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-784sn"] Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.908521 4721 scope.go:117] "RemoveContainer" containerID="97eaa8fdd4adf23f1f946a92c02d3624ab1cd096bb719d0d8ec5dc19e719383f" Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.910035 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-784sn"] Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.924683 4721 scope.go:117] "RemoveContainer" containerID="31da193939937244b1745b39f0045307393e53674997221977290143f5cdb501" Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.962604 4721 scope.go:117] "RemoveContainer" containerID="901f9efbc06176cb8981df88fab0b836ebfa17c4fc294972e7d712c70eaf1b3a" Nov 21 14:01:08 crc kubenswrapper[4721]: E1121 14:01:08.963474 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"901f9efbc06176cb8981df88fab0b836ebfa17c4fc294972e7d712c70eaf1b3a\": container with ID starting with 901f9efbc06176cb8981df88fab0b836ebfa17c4fc294972e7d712c70eaf1b3a not found: ID does not exist" containerID="901f9efbc06176cb8981df88fab0b836ebfa17c4fc294972e7d712c70eaf1b3a" Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.963523 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"901f9efbc06176cb8981df88fab0b836ebfa17c4fc294972e7d712c70eaf1b3a"} err="failed to get container status \"901f9efbc06176cb8981df88fab0b836ebfa17c4fc294972e7d712c70eaf1b3a\": rpc error: code = NotFound desc = could not find container \"901f9efbc06176cb8981df88fab0b836ebfa17c4fc294972e7d712c70eaf1b3a\": container with ID starting with 901f9efbc06176cb8981df88fab0b836ebfa17c4fc294972e7d712c70eaf1b3a not found: ID does not exist" Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.963552 4721 scope.go:117] "RemoveContainer" containerID="97eaa8fdd4adf23f1f946a92c02d3624ab1cd096bb719d0d8ec5dc19e719383f" Nov 21 14:01:08 crc kubenswrapper[4721]: E1121 14:01:08.964015 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97eaa8fdd4adf23f1f946a92c02d3624ab1cd096bb719d0d8ec5dc19e719383f\": container with ID starting with 97eaa8fdd4adf23f1f946a92c02d3624ab1cd096bb719d0d8ec5dc19e719383f not found: ID does not exist" containerID="97eaa8fdd4adf23f1f946a92c02d3624ab1cd096bb719d0d8ec5dc19e719383f" Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.964131 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97eaa8fdd4adf23f1f946a92c02d3624ab1cd096bb719d0d8ec5dc19e719383f"} err="failed to get container status \"97eaa8fdd4adf23f1f946a92c02d3624ab1cd096bb719d0d8ec5dc19e719383f\": rpc error: code = NotFound desc = could not find container \"97eaa8fdd4adf23f1f946a92c02d3624ab1cd096bb719d0d8ec5dc19e719383f\": container with ID starting with 97eaa8fdd4adf23f1f946a92c02d3624ab1cd096bb719d0d8ec5dc19e719383f not found: ID does not exist" Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.964177 4721 scope.go:117] "RemoveContainer" containerID="31da193939937244b1745b39f0045307393e53674997221977290143f5cdb501" Nov 21 14:01:08 crc kubenswrapper[4721]: E1121 14:01:08.964660 4721 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"31da193939937244b1745b39f0045307393e53674997221977290143f5cdb501\": container with ID starting with 31da193939937244b1745b39f0045307393e53674997221977290143f5cdb501 not found: ID does not exist" containerID="31da193939937244b1745b39f0045307393e53674997221977290143f5cdb501" Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.964699 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31da193939937244b1745b39f0045307393e53674997221977290143f5cdb501"} err="failed to get container status \"31da193939937244b1745b39f0045307393e53674997221977290143f5cdb501\": rpc error: code = NotFound desc = could not find container \"31da193939937244b1745b39f0045307393e53674997221977290143f5cdb501\": container with ID starting with 31da193939937244b1745b39f0045307393e53674997221977290143f5cdb501 not found: ID does not exist" Nov 21 14:01:08 crc kubenswrapper[4721]: I1121 14:01:08.993192 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43c83dc3-937a-40b3-b4dc-b6c141021001" path="/var/lib/kubelet/pods/43c83dc3-937a-40b3-b4dc-b6c141021001/volumes" Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.237979 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-k4qns"] Nov 21 14:01:12 crc kubenswrapper[4721]: E1121 14:01:12.238547 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c83dc3-937a-40b3-b4dc-b6c141021001" containerName="registry-server" Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.238559 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c83dc3-937a-40b3-b4dc-b6c141021001" containerName="registry-server" Nov 21 14:01:12 crc kubenswrapper[4721]: E1121 14:01:12.238570 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c83dc3-937a-40b3-b4dc-b6c141021001" containerName="extract-content" Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.238576 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c83dc3-937a-40b3-b4dc-b6c141021001" containerName="extract-content" Nov 21 14:01:12 crc kubenswrapper[4721]: E1121 14:01:12.238587 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c83dc3-937a-40b3-b4dc-b6c141021001" containerName="extract-utilities" Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.238594 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c83dc3-937a-40b3-b4dc-b6c141021001" containerName="extract-utilities" Nov 21 14:01:12 crc kubenswrapper[4721]: E1121 14:01:12.238603 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a7085c1-72d8-4d94-ae0b-269f3b76a006" containerName="keystone-cron" Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.238609 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a7085c1-72d8-4d94-ae0b-269f3b76a006" containerName="keystone-cron" Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.238750 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="43c83dc3-937a-40b3-b4dc-b6c141021001" containerName="registry-server" Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.238765 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a7085c1-72d8-4d94-ae0b-269f3b76a006" containerName="keystone-cron" Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.239724 4721 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-k4qns" Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.251777 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k4qns"] Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.386153 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1e74e08-521a-40f1-835a-98d7cbf05533-utilities\") pod \"community-operators-k4qns\" (UID: \"d1e74e08-521a-40f1-835a-98d7cbf05533\") " pod="openshift-marketplace/community-operators-k4qns" Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.386210 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1e74e08-521a-40f1-835a-98d7cbf05533-catalog-content\") pod \"community-operators-k4qns\" (UID: \"d1e74e08-521a-40f1-835a-98d7cbf05533\") " pod="openshift-marketplace/community-operators-k4qns" Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.386242 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42bmc\" (UniqueName: \"kubernetes.io/projected/d1e74e08-521a-40f1-835a-98d7cbf05533-kube-api-access-42bmc\") pod \"community-operators-k4qns\" (UID: \"d1e74e08-521a-40f1-835a-98d7cbf05533\") " pod="openshift-marketplace/community-operators-k4qns" Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.487570 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42bmc\" (UniqueName: \"kubernetes.io/projected/d1e74e08-521a-40f1-835a-98d7cbf05533-kube-api-access-42bmc\") pod \"community-operators-k4qns\" (UID: \"d1e74e08-521a-40f1-835a-98d7cbf05533\") " pod="openshift-marketplace/community-operators-k4qns" Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.487763 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1e74e08-521a-40f1-835a-98d7cbf05533-utilities\") pod \"community-operators-k4qns\" (UID: \"d1e74e08-521a-40f1-835a-98d7cbf05533\") " pod="openshift-marketplace/community-operators-k4qns" Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.487820 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1e74e08-521a-40f1-835a-98d7cbf05533-catalog-content\") pod \"community-operators-k4qns\" (UID: \"d1e74e08-521a-40f1-835a-98d7cbf05533\") " pod="openshift-marketplace/community-operators-k4qns" Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.488398 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1e74e08-521a-40f1-835a-98d7cbf05533-catalog-content\") pod \"community-operators-k4qns\" (UID: \"d1e74e08-521a-40f1-835a-98d7cbf05533\") " pod="openshift-marketplace/community-operators-k4qns" Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.488627 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1e74e08-521a-40f1-835a-98d7cbf05533-utilities\") pod \"community-operators-k4qns\" (UID: \"d1e74e08-521a-40f1-835a-98d7cbf05533\") " pod="openshift-marketplace/community-operators-k4qns" Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.510645 4721 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-42bmc\" (UniqueName: \"kubernetes.io/projected/d1e74e08-521a-40f1-835a-98d7cbf05533-kube-api-access-42bmc\") pod \"community-operators-k4qns\" (UID: \"d1e74e08-521a-40f1-835a-98d7cbf05533\") " pod="openshift-marketplace/community-operators-k4qns" Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.584384 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k4qns" Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.869220 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k4qns"] Nov 21 14:01:12 crc kubenswrapper[4721]: I1121 14:01:12.894462 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4qns" event={"ID":"d1e74e08-521a-40f1-835a-98d7cbf05533","Type":"ContainerStarted","Data":"7fafeaff171d36e31207ec4a50bd162811b0d03d9c6a5b1fe5c015f46f5a1710"} Nov 21 14:01:13 crc kubenswrapper[4721]: I1121 14:01:13.905764 4721 generic.go:334] "Generic (PLEG): container finished" podID="d1e74e08-521a-40f1-835a-98d7cbf05533" containerID="081b03a9414db17aabc7844828e3bedb281668b30eaabd52f234e22cd2e11b3b" exitCode=0 Nov 21 14:01:13 crc kubenswrapper[4721]: I1121 14:01:13.906242 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4qns" event={"ID":"d1e74e08-521a-40f1-835a-98d7cbf05533","Type":"ContainerDied","Data":"081b03a9414db17aabc7844828e3bedb281668b30eaabd52f234e22cd2e11b3b"} Nov 21 14:01:14 crc kubenswrapper[4721]: I1121 14:01:14.920724 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4qns" event={"ID":"d1e74e08-521a-40f1-835a-98d7cbf05533","Type":"ContainerStarted","Data":"92342e11c343bd20fdc5324cc11b2e7e2c8280513f84352b68c1ef92a6c30440"} Nov 21 14:01:15 crc kubenswrapper[4721]: I1121 14:01:15.927589 4721 generic.go:334] "Generic (PLEG): container finished" podID="d1e74e08-521a-40f1-835a-98d7cbf05533" containerID="92342e11c343bd20fdc5324cc11b2e7e2c8280513f84352b68c1ef92a6c30440" exitCode=0 Nov 21 14:01:15 crc kubenswrapper[4721]: I1121 14:01:15.927644 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4qns" event={"ID":"d1e74e08-521a-40f1-835a-98d7cbf05533","Type":"ContainerDied","Data":"92342e11c343bd20fdc5324cc11b2e7e2c8280513f84352b68c1ef92a6c30440"} Nov 21 14:01:16 crc kubenswrapper[4721]: I1121 14:01:16.950264 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4qns" event={"ID":"d1e74e08-521a-40f1-835a-98d7cbf05533","Type":"ContainerStarted","Data":"345c195e742d48bbbf83bc308561786749488395dfda8dbff04cd7932638b26f"} Nov 21 14:01:16 crc kubenswrapper[4721]: I1121 14:01:16.974334 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-k4qns" podStartSLOduration=2.453603246 podStartE2EDuration="4.974319522s" podCreationTimestamp="2025-11-21 14:01:12 +0000 UTC" firstStartedPulling="2025-11-21 14:01:13.910060835 +0000 UTC m=+1511.887978974" lastFinishedPulling="2025-11-21 14:01:16.430803972 +0000 UTC m=+1514.408695250" observedRunningTime="2025-11-21 14:01:16.973228262 +0000 UTC m=+1514.951119540" watchObservedRunningTime="2025-11-21 14:01:16.974319522 +0000 UTC m=+1514.952210780" Nov 21 14:01:22 crc kubenswrapper[4721]: I1121 14:01:22.585371 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-k4qns" Nov 21 14:01:22 crc kubenswrapper[4721]: I1121 14:01:22.586910 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-k4qns" Nov 21 14:01:22 crc kubenswrapper[4721]: I1121 14:01:22.642168 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-k4qns" Nov 21 14:01:23 crc kubenswrapper[4721]: I1121 14:01:23.035726 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-k4qns" Nov 21 14:01:23 crc kubenswrapper[4721]: I1121 14:01:23.088003 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-k4qns"] Nov 21 14:01:25 crc kubenswrapper[4721]: I1121 14:01:25.014559 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-k4qns" podUID="d1e74e08-521a-40f1-835a-98d7cbf05533" containerName="registry-server" containerID="cri-o://345c195e742d48bbbf83bc308561786749488395dfda8dbff04cd7932638b26f" gracePeriod=2 Nov 21 14:01:25 crc kubenswrapper[4721]: I1121 14:01:25.459666 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k4qns" Nov 21 14:01:25 crc kubenswrapper[4721]: I1121 14:01:25.488198 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1e74e08-521a-40f1-835a-98d7cbf05533-utilities\") pod \"d1e74e08-521a-40f1-835a-98d7cbf05533\" (UID: \"d1e74e08-521a-40f1-835a-98d7cbf05533\") " Nov 21 14:01:25 crc kubenswrapper[4721]: I1121 14:01:25.488262 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1e74e08-521a-40f1-835a-98d7cbf05533-catalog-content\") pod \"d1e74e08-521a-40f1-835a-98d7cbf05533\" (UID: \"d1e74e08-521a-40f1-835a-98d7cbf05533\") " Nov 21 14:01:25 crc kubenswrapper[4721]: I1121 14:01:25.488335 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-42bmc\" (UniqueName: \"kubernetes.io/projected/d1e74e08-521a-40f1-835a-98d7cbf05533-kube-api-access-42bmc\") pod \"d1e74e08-521a-40f1-835a-98d7cbf05533\" (UID: \"d1e74e08-521a-40f1-835a-98d7cbf05533\") " Nov 21 14:01:25 crc kubenswrapper[4721]: I1121 14:01:25.489177 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1e74e08-521a-40f1-835a-98d7cbf05533-utilities" (OuterVolumeSpecName: "utilities") pod "d1e74e08-521a-40f1-835a-98d7cbf05533" (UID: "d1e74e08-521a-40f1-835a-98d7cbf05533"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:01:25 crc kubenswrapper[4721]: I1121 14:01:25.489508 4721 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1e74e08-521a-40f1-835a-98d7cbf05533-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:01:25 crc kubenswrapper[4721]: I1121 14:01:25.496804 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1e74e08-521a-40f1-835a-98d7cbf05533-kube-api-access-42bmc" (OuterVolumeSpecName: "kube-api-access-42bmc") pod "d1e74e08-521a-40f1-835a-98d7cbf05533" (UID: "d1e74e08-521a-40f1-835a-98d7cbf05533"). InnerVolumeSpecName "kube-api-access-42bmc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:01:25 crc kubenswrapper[4721]: I1121 14:01:25.590770 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42bmc\" (UniqueName: \"kubernetes.io/projected/d1e74e08-521a-40f1-835a-98d7cbf05533-kube-api-access-42bmc\") on node \"crc\" DevicePath \"\"" Nov 21 14:01:26 crc kubenswrapper[4721]: I1121 14:01:26.023417 4721 generic.go:334] "Generic (PLEG): container finished" podID="d1e74e08-521a-40f1-835a-98d7cbf05533" containerID="345c195e742d48bbbf83bc308561786749488395dfda8dbff04cd7932638b26f" exitCode=0 Nov 21 14:01:26 crc kubenswrapper[4721]: I1121 14:01:26.023498 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k4qns" Nov 21 14:01:26 crc kubenswrapper[4721]: I1121 14:01:26.023527 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4qns" event={"ID":"d1e74e08-521a-40f1-835a-98d7cbf05533","Type":"ContainerDied","Data":"345c195e742d48bbbf83bc308561786749488395dfda8dbff04cd7932638b26f"} Nov 21 14:01:26 crc kubenswrapper[4721]: I1121 14:01:26.023724 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4qns" event={"ID":"d1e74e08-521a-40f1-835a-98d7cbf05533","Type":"ContainerDied","Data":"7fafeaff171d36e31207ec4a50bd162811b0d03d9c6a5b1fe5c015f46f5a1710"} Nov 21 14:01:26 crc kubenswrapper[4721]: I1121 14:01:26.023748 4721 scope.go:117] "RemoveContainer" containerID="345c195e742d48bbbf83bc308561786749488395dfda8dbff04cd7932638b26f" Nov 21 14:01:26 crc kubenswrapper[4721]: I1121 14:01:26.041053 4721 scope.go:117] "RemoveContainer" containerID="92342e11c343bd20fdc5324cc11b2e7e2c8280513f84352b68c1ef92a6c30440" Nov 21 14:01:26 crc kubenswrapper[4721]: I1121 14:01:26.064407 4721 scope.go:117] "RemoveContainer" containerID="081b03a9414db17aabc7844828e3bedb281668b30eaabd52f234e22cd2e11b3b" Nov 21 14:01:26 crc kubenswrapper[4721]: I1121 14:01:26.084725 4721 scope.go:117] "RemoveContainer" containerID="345c195e742d48bbbf83bc308561786749488395dfda8dbff04cd7932638b26f" Nov 21 14:01:26 crc kubenswrapper[4721]: E1121 14:01:26.085167 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"345c195e742d48bbbf83bc308561786749488395dfda8dbff04cd7932638b26f\": container with ID starting with 345c195e742d48bbbf83bc308561786749488395dfda8dbff04cd7932638b26f not found: ID does not exist" containerID="345c195e742d48bbbf83bc308561786749488395dfda8dbff04cd7932638b26f" Nov 21 14:01:26 crc kubenswrapper[4721]: I1121 14:01:26.085222 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"345c195e742d48bbbf83bc308561786749488395dfda8dbff04cd7932638b26f"} err="failed to get container status \"345c195e742d48bbbf83bc308561786749488395dfda8dbff04cd7932638b26f\": rpc error: code = NotFound desc = could not find container \"345c195e742d48bbbf83bc308561786749488395dfda8dbff04cd7932638b26f\": container with ID starting with 345c195e742d48bbbf83bc308561786749488395dfda8dbff04cd7932638b26f not found: ID does not exist" Nov 21 14:01:26 crc kubenswrapper[4721]: I1121 14:01:26.085247 4721 scope.go:117] "RemoveContainer" containerID="92342e11c343bd20fdc5324cc11b2e7e2c8280513f84352b68c1ef92a6c30440" Nov 21 14:01:26 crc kubenswrapper[4721]: E1121 14:01:26.085512 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
Nov 21 14:01:26 crc kubenswrapper[4721]: I1121 14:01:26.086181 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92342e11c343bd20fdc5324cc11b2e7e2c8280513f84352b68c1ef92a6c30440"} err="failed to get container status \"92342e11c343bd20fdc5324cc11b2e7e2c8280513f84352b68c1ef92a6c30440\": rpc error: code = NotFound desc = could not find container \"92342e11c343bd20fdc5324cc11b2e7e2c8280513f84352b68c1ef92a6c30440\": container with ID starting with 92342e11c343bd20fdc5324cc11b2e7e2c8280513f84352b68c1ef92a6c30440 not found: ID does not exist"
Nov 21 14:01:26 crc kubenswrapper[4721]: I1121 14:01:26.086219 4721 scope.go:117] "RemoveContainer" containerID="081b03a9414db17aabc7844828e3bedb281668b30eaabd52f234e22cd2e11b3b"
Nov 21 14:01:26 crc kubenswrapper[4721]: E1121 14:01:26.086465 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"081b03a9414db17aabc7844828e3bedb281668b30eaabd52f234e22cd2e11b3b\": container with ID starting with 081b03a9414db17aabc7844828e3bedb281668b30eaabd52f234e22cd2e11b3b not found: ID does not exist" containerID="081b03a9414db17aabc7844828e3bedb281668b30eaabd52f234e22cd2e11b3b"
Nov 21 14:01:26 crc kubenswrapper[4721]: I1121 14:01:26.086492 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"081b03a9414db17aabc7844828e3bedb281668b30eaabd52f234e22cd2e11b3b"} err="failed to get container status \"081b03a9414db17aabc7844828e3bedb281668b30eaabd52f234e22cd2e11b3b\": rpc error: code = NotFound desc = could not find container \"081b03a9414db17aabc7844828e3bedb281668b30eaabd52f234e22cd2e11b3b\": container with ID starting with 081b03a9414db17aabc7844828e3bedb281668b30eaabd52f234e22cd2e11b3b not found: ID does not exist"
Nov 21 14:01:26 crc kubenswrapper[4721]: I1121 14:01:26.494461 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1e74e08-521a-40f1-835a-98d7cbf05533-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d1e74e08-521a-40f1-835a-98d7cbf05533" (UID: "d1e74e08-521a-40f1-835a-98d7cbf05533"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:01:26 crc kubenswrapper[4721]: I1121 14:01:26.502915 4721 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1e74e08-521a-40f1-835a-98d7cbf05533-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:01:26 crc kubenswrapper[4721]: I1121 14:01:26.657981 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-k4qns"] Nov 21 14:01:26 crc kubenswrapper[4721]: I1121 14:01:26.668380 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-k4qns"] Nov 21 14:01:27 crc kubenswrapper[4721]: I1121 14:01:27.003954 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1e74e08-521a-40f1-835a-98d7cbf05533" path="/var/lib/kubelet/pods/d1e74e08-521a-40f1-835a-98d7cbf05533/volumes" Nov 21 14:01:34 crc kubenswrapper[4721]: I1121 14:01:34.104722 4721 generic.go:334] "Generic (PLEG): container finished" podID="1370b67c-066a-43da-933e-fff17d0b0fae" containerID="6845afb1ecd60cee4b1e9160d8a82a663e0901422b0ec00fde81bd132bc4b2c7" exitCode=0 Nov 21 14:01:34 crc kubenswrapper[4721]: I1121 14:01:34.104965 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-5v8sx/must-gather-44tfc" event={"ID":"1370b67c-066a-43da-933e-fff17d0b0fae","Type":"ContainerDied","Data":"6845afb1ecd60cee4b1e9160d8a82a663e0901422b0ec00fde81bd132bc4b2c7"} Nov 21 14:01:34 crc kubenswrapper[4721]: I1121 14:01:34.106244 4721 scope.go:117] "RemoveContainer" containerID="6845afb1ecd60cee4b1e9160d8a82a663e0901422b0ec00fde81bd132bc4b2c7" Nov 21 14:01:34 crc kubenswrapper[4721]: I1121 14:01:34.583057 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-5v8sx_must-gather-44tfc_1370b67c-066a-43da-933e-fff17d0b0fae/gather/0.log" Nov 21 14:01:35 crc kubenswrapper[4721]: I1121 14:01:35.262227 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:01:35 crc kubenswrapper[4721]: I1121 14:01:35.262669 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:01:41 crc kubenswrapper[4721]: I1121 14:01:41.214247 4721 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-5v8sx/must-gather-44tfc"] Nov 21 14:01:41 crc kubenswrapper[4721]: I1121 14:01:41.215140 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-5v8sx/must-gather-44tfc" podUID="1370b67c-066a-43da-933e-fff17d0b0fae" containerName="copy" containerID="cri-o://9121dde909cda789cabe1521fa465474140029d7f354019254e4d27f4614e4f7" gracePeriod=2 Nov 21 14:01:41 crc kubenswrapper[4721]: I1121 14:01:41.218944 4721 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-5v8sx/must-gather-44tfc"] Nov 21 14:01:41 crc kubenswrapper[4721]: I1121 14:01:41.646231 4721 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-must-gather-5v8sx_must-gather-44tfc_1370b67c-066a-43da-933e-fff17d0b0fae/copy/0.log" Nov 21 14:01:41 crc kubenswrapper[4721]: I1121 14:01:41.646997 4721 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-5v8sx/must-gather-44tfc" Nov 21 14:01:41 crc kubenswrapper[4721]: I1121 14:01:41.825881 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1370b67c-066a-43da-933e-fff17d0b0fae-must-gather-output\") pod \"1370b67c-066a-43da-933e-fff17d0b0fae\" (UID: \"1370b67c-066a-43da-933e-fff17d0b0fae\") " Nov 21 14:01:41 crc kubenswrapper[4721]: I1121 14:01:41.826441 4721 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jlf7j\" (UniqueName: \"kubernetes.io/projected/1370b67c-066a-43da-933e-fff17d0b0fae-kube-api-access-jlf7j\") pod \"1370b67c-066a-43da-933e-fff17d0b0fae\" (UID: \"1370b67c-066a-43da-933e-fff17d0b0fae\") " Nov 21 14:01:41 crc kubenswrapper[4721]: I1121 14:01:41.832307 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1370b67c-066a-43da-933e-fff17d0b0fae-kube-api-access-jlf7j" (OuterVolumeSpecName: "kube-api-access-jlf7j") pod "1370b67c-066a-43da-933e-fff17d0b0fae" (UID: "1370b67c-066a-43da-933e-fff17d0b0fae"). InnerVolumeSpecName "kube-api-access-jlf7j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:01:41 crc kubenswrapper[4721]: I1121 14:01:41.893265 4721 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1370b67c-066a-43da-933e-fff17d0b0fae-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "1370b67c-066a-43da-933e-fff17d0b0fae" (UID: "1370b67c-066a-43da-933e-fff17d0b0fae"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:01:41 crc kubenswrapper[4721]: I1121 14:01:41.928472 4721 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1370b67c-066a-43da-933e-fff17d0b0fae-must-gather-output\") on node \"crc\" DevicePath \"\"" Nov 21 14:01:41 crc kubenswrapper[4721]: I1121 14:01:41.928513 4721 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jlf7j\" (UniqueName: \"kubernetes.io/projected/1370b67c-066a-43da-933e-fff17d0b0fae-kube-api-access-jlf7j\") on node \"crc\" DevicePath \"\"" Nov 21 14:01:42 crc kubenswrapper[4721]: I1121 14:01:42.170730 4721 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-5v8sx_must-gather-44tfc_1370b67c-066a-43da-933e-fff17d0b0fae/copy/0.log" Nov 21 14:01:42 crc kubenswrapper[4721]: I1121 14:01:42.171248 4721 generic.go:334] "Generic (PLEG): container finished" podID="1370b67c-066a-43da-933e-fff17d0b0fae" containerID="9121dde909cda789cabe1521fa465474140029d7f354019254e4d27f4614e4f7" exitCode=143 Nov 21 14:01:42 crc kubenswrapper[4721]: I1121 14:01:42.171304 4721 scope.go:117] "RemoveContainer" containerID="9121dde909cda789cabe1521fa465474140029d7f354019254e4d27f4614e4f7" Nov 21 14:01:42 crc kubenswrapper[4721]: I1121 14:01:42.171311 4721 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-5v8sx/must-gather-44tfc" Nov 21 14:01:42 crc kubenswrapper[4721]: I1121 14:01:42.201683 4721 scope.go:117] "RemoveContainer" containerID="6845afb1ecd60cee4b1e9160d8a82a663e0901422b0ec00fde81bd132bc4b2c7" Nov 21 14:01:42 crc kubenswrapper[4721]: I1121 14:01:42.242193 4721 scope.go:117] "RemoveContainer" containerID="9121dde909cda789cabe1521fa465474140029d7f354019254e4d27f4614e4f7" Nov 21 14:01:42 crc kubenswrapper[4721]: E1121 14:01:42.242699 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9121dde909cda789cabe1521fa465474140029d7f354019254e4d27f4614e4f7\": container with ID starting with 9121dde909cda789cabe1521fa465474140029d7f354019254e4d27f4614e4f7 not found: ID does not exist" containerID="9121dde909cda789cabe1521fa465474140029d7f354019254e4d27f4614e4f7" Nov 21 14:01:42 crc kubenswrapper[4721]: I1121 14:01:42.242726 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9121dde909cda789cabe1521fa465474140029d7f354019254e4d27f4614e4f7"} err="failed to get container status \"9121dde909cda789cabe1521fa465474140029d7f354019254e4d27f4614e4f7\": rpc error: code = NotFound desc = could not find container \"9121dde909cda789cabe1521fa465474140029d7f354019254e4d27f4614e4f7\": container with ID starting with 9121dde909cda789cabe1521fa465474140029d7f354019254e4d27f4614e4f7 not found: ID does not exist" Nov 21 14:01:42 crc kubenswrapper[4721]: I1121 14:01:42.242745 4721 scope.go:117] "RemoveContainer" containerID="6845afb1ecd60cee4b1e9160d8a82a663e0901422b0ec00fde81bd132bc4b2c7" Nov 21 14:01:42 crc kubenswrapper[4721]: E1121 14:01:42.243162 4721 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6845afb1ecd60cee4b1e9160d8a82a663e0901422b0ec00fde81bd132bc4b2c7\": container with ID starting with 6845afb1ecd60cee4b1e9160d8a82a663e0901422b0ec00fde81bd132bc4b2c7 not found: ID does not exist" containerID="6845afb1ecd60cee4b1e9160d8a82a663e0901422b0ec00fde81bd132bc4b2c7" Nov 21 14:01:42 crc kubenswrapper[4721]: I1121 14:01:42.243184 4721 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6845afb1ecd60cee4b1e9160d8a82a663e0901422b0ec00fde81bd132bc4b2c7"} err="failed to get container status \"6845afb1ecd60cee4b1e9160d8a82a663e0901422b0ec00fde81bd132bc4b2c7\": rpc error: code = NotFound desc = could not find container \"6845afb1ecd60cee4b1e9160d8a82a663e0901422b0ec00fde81bd132bc4b2c7\": container with ID starting with 6845afb1ecd60cee4b1e9160d8a82a663e0901422b0ec00fde81bd132bc4b2c7 not found: ID does not exist" Nov 21 14:01:42 crc kubenswrapper[4721]: I1121 14:01:42.994968 4721 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1370b67c-066a-43da-933e-fff17d0b0fae" path="/var/lib/kubelet/pods/1370b67c-066a-43da-933e-fff17d0b0fae/volumes" Nov 21 14:02:05 crc kubenswrapper[4721]: I1121 14:02:05.262209 4721 patch_prober.go:28] interesting pod/machine-config-daemon-mstbk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:02:05 crc kubenswrapper[4721]: I1121 14:02:05.263920 4721 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" 
podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:02:05 crc kubenswrapper[4721]: I1121 14:02:05.264044 4721 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" Nov 21 14:02:05 crc kubenswrapper[4721]: I1121 14:02:05.264598 4721 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bf613cedd13a9689bd43ccd46e14f24a5ed5a9a6aff6fa123ea297718f0efb59"} pod="openshift-machine-config-operator/machine-config-daemon-mstbk" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 14:02:05 crc kubenswrapper[4721]: I1121 14:02:05.264734 4721 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" containerName="machine-config-daemon" containerID="cri-o://bf613cedd13a9689bd43ccd46e14f24a5ed5a9a6aff6fa123ea297718f0efb59" gracePeriod=600 Nov 21 14:02:05 crc kubenswrapper[4721]: E1121 14:02:05.388382 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mstbk_openshift-machine-config-operator(fe3943c5-1349-4275-bc45-25db418c7488)\"" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" Nov 21 14:02:06 crc kubenswrapper[4721]: I1121 14:02:06.341789 4721 generic.go:334] "Generic (PLEG): container finished" podID="fe3943c5-1349-4275-bc45-25db418c7488" containerID="bf613cedd13a9689bd43ccd46e14f24a5ed5a9a6aff6fa123ea297718f0efb59" exitCode=0 Nov 21 14:02:06 crc kubenswrapper[4721]: I1121 14:02:06.341837 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" event={"ID":"fe3943c5-1349-4275-bc45-25db418c7488","Type":"ContainerDied","Data":"bf613cedd13a9689bd43ccd46e14f24a5ed5a9a6aff6fa123ea297718f0efb59"} Nov 21 14:02:06 crc kubenswrapper[4721]: I1121 14:02:06.341875 4721 scope.go:117] "RemoveContainer" containerID="a1c015fb42138002e1bbf6657d93a4f584890b9fe82b0b256353c923999d7d76" Nov 21 14:02:06 crc kubenswrapper[4721]: I1121 14:02:06.342425 4721 scope.go:117] "RemoveContainer" containerID="bf613cedd13a9689bd43ccd46e14f24a5ed5a9a6aff6fa123ea297718f0efb59" Nov 21 14:02:06 crc kubenswrapper[4721]: E1121 14:02:06.342730 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mstbk_openshift-machine-config-operator(fe3943c5-1349-4275-bc45-25db418c7488)\"" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" Nov 21 14:02:17 crc kubenswrapper[4721]: I1121 14:02:17.982112 4721 scope.go:117] "RemoveContainer" containerID="bf613cedd13a9689bd43ccd46e14f24a5ed5a9a6aff6fa123ea297718f0efb59" Nov 21 14:02:17 crc kubenswrapper[4721]: E1121 14:02:17.983278 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
Nov 21 14:02:29 crc kubenswrapper[4721]: I1121 14:02:29.981900 4721 scope.go:117] "RemoveContainer" containerID="bf613cedd13a9689bd43ccd46e14f24a5ed5a9a6aff6fa123ea297718f0efb59"
Nov 21 14:02:29 crc kubenswrapper[4721]: E1121 14:02:29.982660 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mstbk_openshift-machine-config-operator(fe3943c5-1349-4275-bc45-25db418c7488)\"" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488"
Nov 21 14:02:40 crc kubenswrapper[4721]: I1121 14:02:40.982329 4721 scope.go:117] "RemoveContainer" containerID="bf613cedd13a9689bd43ccd46e14f24a5ed5a9a6aff6fa123ea297718f0efb59"
Nov 21 14:02:40 crc kubenswrapper[4721]: E1121 14:02:40.983107 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mstbk_openshift-machine-config-operator(fe3943c5-1349-4275-bc45-25db418c7488)\"" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488"
Nov 21 14:02:52 crc kubenswrapper[4721]: I1121 14:02:52.984912 4721 scope.go:117] "RemoveContainer" containerID="bf613cedd13a9689bd43ccd46e14f24a5ed5a9a6aff6fa123ea297718f0efb59"
Nov 21 14:02:52 crc kubenswrapper[4721]: E1121 14:02:52.985805 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mstbk_openshift-machine-config-operator(fe3943c5-1349-4275-bc45-25db418c7488)\"" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488"
Nov 21 14:03:06 crc kubenswrapper[4721]: I1121 14:03:06.981418 4721 scope.go:117] "RemoveContainer" containerID="bf613cedd13a9689bd43ccd46e14f24a5ed5a9a6aff6fa123ea297718f0efb59"
Nov 21 14:03:06 crc kubenswrapper[4721]: E1121 14:03:06.982265 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mstbk_openshift-machine-config-operator(fe3943c5-1349-4275-bc45-25db418c7488)\"" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488"
Nov 21 14:03:20 crc kubenswrapper[4721]: I1121 14:03:20.981347 4721 scope.go:117] "RemoveContainer" containerID="bf613cedd13a9689bd43ccd46e14f24a5ed5a9a6aff6fa123ea297718f0efb59"
Nov 21 14:03:20 crc kubenswrapper[4721]: E1121 14:03:20.982265 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mstbk_openshift-machine-config-operator(fe3943c5-1349-4275-bc45-25db418c7488)\"" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488"
pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" Nov 21 14:03:35 crc kubenswrapper[4721]: I1121 14:03:35.982378 4721 scope.go:117] "RemoveContainer" containerID="bf613cedd13a9689bd43ccd46e14f24a5ed5a9a6aff6fa123ea297718f0efb59" Nov 21 14:03:35 crc kubenswrapper[4721]: E1121 14:03:35.983640 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mstbk_openshift-machine-config-operator(fe3943c5-1349-4275-bc45-25db418c7488)\"" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" Nov 21 14:03:47 crc kubenswrapper[4721]: I1121 14:03:47.982421 4721 scope.go:117] "RemoveContainer" containerID="bf613cedd13a9689bd43ccd46e14f24a5ed5a9a6aff6fa123ea297718f0efb59" Nov 21 14:03:47 crc kubenswrapper[4721]: E1121 14:03:47.983445 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mstbk_openshift-machine-config-operator(fe3943c5-1349-4275-bc45-25db418c7488)\"" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" Nov 21 14:04:02 crc kubenswrapper[4721]: I1121 14:04:02.982792 4721 scope.go:117] "RemoveContainer" containerID="bf613cedd13a9689bd43ccd46e14f24a5ed5a9a6aff6fa123ea297718f0efb59" Nov 21 14:04:02 crc kubenswrapper[4721]: E1121 14:04:02.983689 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mstbk_openshift-machine-config-operator(fe3943c5-1349-4275-bc45-25db418c7488)\"" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" Nov 21 14:04:13 crc kubenswrapper[4721]: I1121 14:04:13.981704 4721 scope.go:117] "RemoveContainer" containerID="bf613cedd13a9689bd43ccd46e14f24a5ed5a9a6aff6fa123ea297718f0efb59" Nov 21 14:04:13 crc kubenswrapper[4721]: E1121 14:04:13.982850 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mstbk_openshift-machine-config-operator(fe3943c5-1349-4275-bc45-25db418c7488)\"" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" Nov 21 14:04:25 crc kubenswrapper[4721]: I1121 14:04:25.982650 4721 scope.go:117] "RemoveContainer" containerID="bf613cedd13a9689bd43ccd46e14f24a5ed5a9a6aff6fa123ea297718f0efb59" Nov 21 14:04:25 crc kubenswrapper[4721]: E1121 14:04:25.983636 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mstbk_openshift-machine-config-operator(fe3943c5-1349-4275-bc45-25db418c7488)\"" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488" Nov 21 14:04:38 crc kubenswrapper[4721]: I1121 14:04:38.981775 4721 
Nov 21 14:04:38 crc kubenswrapper[4721]: E1121 14:04:38.985217 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mstbk_openshift-machine-config-operator(fe3943c5-1349-4275-bc45-25db418c7488)\"" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488"
Nov 21 14:04:51 crc kubenswrapper[4721]: I1121 14:04:51.981336 4721 scope.go:117] "RemoveContainer" containerID="bf613cedd13a9689bd43ccd46e14f24a5ed5a9a6aff6fa123ea297718f0efb59"
Nov 21 14:04:51 crc kubenswrapper[4721]: E1121 14:04:51.982446 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mstbk_openshift-machine-config-operator(fe3943c5-1349-4275-bc45-25db418c7488)\"" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488"
Nov 21 14:05:02 crc kubenswrapper[4721]: I1121 14:05:02.986853 4721 scope.go:117] "RemoveContainer" containerID="bf613cedd13a9689bd43ccd46e14f24a5ed5a9a6aff6fa123ea297718f0efb59"
Nov 21 14:05:02 crc kubenswrapper[4721]: E1121 14:05:02.987763 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mstbk_openshift-machine-config-operator(fe3943c5-1349-4275-bc45-25db418c7488)\"" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.077369 4721 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5xgvc"]
Nov 21 14:05:04 crc kubenswrapper[4721]: E1121 14:05:04.079275 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1e74e08-521a-40f1-835a-98d7cbf05533" containerName="extract-utilities"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.079518 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1e74e08-521a-40f1-835a-98d7cbf05533" containerName="extract-utilities"
Nov 21 14:05:04 crc kubenswrapper[4721]: E1121 14:05:04.079705 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1e74e08-521a-40f1-835a-98d7cbf05533" containerName="registry-server"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.079858 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1e74e08-521a-40f1-835a-98d7cbf05533" containerName="registry-server"
Nov 21 14:05:04 crc kubenswrapper[4721]: E1121 14:05:04.080110 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1e74e08-521a-40f1-835a-98d7cbf05533" containerName="extract-content"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.080317 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1e74e08-521a-40f1-835a-98d7cbf05533" containerName="extract-content"
Nov 21 14:05:04 crc kubenswrapper[4721]: E1121 14:05:04.080520 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1370b67c-066a-43da-933e-fff17d0b0fae" containerName="copy"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.083500 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1370b67c-066a-43da-933e-fff17d0b0fae" containerName="copy"
Nov 21 14:05:04 crc kubenswrapper[4721]: E1121 14:05:04.083540 4721 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1370b67c-066a-43da-933e-fff17d0b0fae" containerName="gather"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.083548 4721 state_mem.go:107] "Deleted CPUSet assignment" podUID="1370b67c-066a-43da-933e-fff17d0b0fae" containerName="gather"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.083838 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1370b67c-066a-43da-933e-fff17d0b0fae" containerName="gather"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.083859 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="1370b67c-066a-43da-933e-fff17d0b0fae" containerName="copy"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.083869 4721 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1e74e08-521a-40f1-835a-98d7cbf05533" containerName="registry-server"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.085079 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5xgvc"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.087766 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5xgvc"]
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.219518 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c85fd98-0a74-451a-aafb-54ed0e13222d-catalog-content\") pod \"redhat-operators-5xgvc\" (UID: \"0c85fd98-0a74-451a-aafb-54ed0e13222d\") " pod="openshift-marketplace/redhat-operators-5xgvc"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.219563 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c85fd98-0a74-451a-aafb-54ed0e13222d-utilities\") pod \"redhat-operators-5xgvc\" (UID: \"0c85fd98-0a74-451a-aafb-54ed0e13222d\") " pod="openshift-marketplace/redhat-operators-5xgvc"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.219636 4721 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnmrn\" (UniqueName: \"kubernetes.io/projected/0c85fd98-0a74-451a-aafb-54ed0e13222d-kube-api-access-tnmrn\") pod \"redhat-operators-5xgvc\" (UID: \"0c85fd98-0a74-451a-aafb-54ed0e13222d\") " pod="openshift-marketplace/redhat-operators-5xgvc"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.321040 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c85fd98-0a74-451a-aafb-54ed0e13222d-utilities\") pod \"redhat-operators-5xgvc\" (UID: \"0c85fd98-0a74-451a-aafb-54ed0e13222d\") " pod="openshift-marketplace/redhat-operators-5xgvc"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.321115 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c85fd98-0a74-451a-aafb-54ed0e13222d-catalog-content\") pod \"redhat-operators-5xgvc\" (UID: \"0c85fd98-0a74-451a-aafb-54ed0e13222d\") " pod="openshift-marketplace/redhat-operators-5xgvc"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.321188 4721 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnmrn\" (UniqueName: \"kubernetes.io/projected/0c85fd98-0a74-451a-aafb-54ed0e13222d-kube-api-access-tnmrn\") pod \"redhat-operators-5xgvc\" (UID: \"0c85fd98-0a74-451a-aafb-54ed0e13222d\") " pod="openshift-marketplace/redhat-operators-5xgvc"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.321566 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c85fd98-0a74-451a-aafb-54ed0e13222d-utilities\") pod \"redhat-operators-5xgvc\" (UID: \"0c85fd98-0a74-451a-aafb-54ed0e13222d\") " pod="openshift-marketplace/redhat-operators-5xgvc"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.321912 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c85fd98-0a74-451a-aafb-54ed0e13222d-catalog-content\") pod \"redhat-operators-5xgvc\" (UID: \"0c85fd98-0a74-451a-aafb-54ed0e13222d\") " pod="openshift-marketplace/redhat-operators-5xgvc"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.355299 4721 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnmrn\" (UniqueName: \"kubernetes.io/projected/0c85fd98-0a74-451a-aafb-54ed0e13222d-kube-api-access-tnmrn\") pod \"redhat-operators-5xgvc\" (UID: \"0c85fd98-0a74-451a-aafb-54ed0e13222d\") " pod="openshift-marketplace/redhat-operators-5xgvc"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.401389 4721 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5xgvc"
Nov 21 14:05:04 crc kubenswrapper[4721]: I1121 14:05:04.835782 4721 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5xgvc"]
Nov 21 14:05:05 crc kubenswrapper[4721]: I1121 14:05:05.817304 4721 generic.go:334] "Generic (PLEG): container finished" podID="0c85fd98-0a74-451a-aafb-54ed0e13222d" containerID="0dc7e9861eef0ca6f568cd5a103b07ddfac9651ca064ea5fdfd21f5f90213162" exitCode=0
Nov 21 14:05:05 crc kubenswrapper[4721]: I1121 14:05:05.817363 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xgvc" event={"ID":"0c85fd98-0a74-451a-aafb-54ed0e13222d","Type":"ContainerDied","Data":"0dc7e9861eef0ca6f568cd5a103b07ddfac9651ca064ea5fdfd21f5f90213162"}
Nov 21 14:05:05 crc kubenswrapper[4721]: I1121 14:05:05.817855 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xgvc" event={"ID":"0c85fd98-0a74-451a-aafb-54ed0e13222d","Type":"ContainerStarted","Data":"bc112344327064f25a8b783afbb480eb9c5f2c7054e5f12332d3c4853d6735fd"}
Nov 21 14:05:07 crc kubenswrapper[4721]: I1121 14:05:07.834677 4721 generic.go:334] "Generic (PLEG): container finished" podID="0c85fd98-0a74-451a-aafb-54ed0e13222d" containerID="7350c7f9d5fc617507ee7ff40e59348bc5cd0c29fe5590fb1fc7c3ad7422b2e9" exitCode=0
Nov 21 14:05:07 crc kubenswrapper[4721]: I1121 14:05:07.834988 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xgvc" event={"ID":"0c85fd98-0a74-451a-aafb-54ed0e13222d","Type":"ContainerDied","Data":"7350c7f9d5fc617507ee7ff40e59348bc5cd0c29fe5590fb1fc7c3ad7422b2e9"}
Nov 21 14:05:07 crc kubenswrapper[4721]: I1121 14:05:07.837210 4721 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 21 14:05:08 crc kubenswrapper[4721]: I1121 14:05:08.844788 4721 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xgvc" event={"ID":"0c85fd98-0a74-451a-aafb-54ed0e13222d","Type":"ContainerStarted","Data":"3d6ffa6b8ffaec5db231eb5fe21a3276272ec63ee946b9f4c5afba4becd52a78"}
Nov 21 14:05:08 crc kubenswrapper[4721]: I1121 14:05:08.859395 4721 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5xgvc" podStartSLOduration=2.366778015 podStartE2EDuration="4.859371333s" podCreationTimestamp="2025-11-21 14:05:04 +0000 UTC" firstStartedPulling="2025-11-21 14:05:05.819731979 +0000 UTC m=+1743.797623237" lastFinishedPulling="2025-11-21 14:05:08.312325307 +0000 UTC m=+1746.290216555" observedRunningTime="2025-11-21 14:05:08.858376957 +0000 UTC m=+1746.836268215" watchObservedRunningTime="2025-11-21 14:05:08.859371333 +0000 UTC m=+1746.837262591"
Nov 21 14:05:14 crc kubenswrapper[4721]: I1121 14:05:14.402430 4721 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5xgvc"
Nov 21 14:05:14 crc kubenswrapper[4721]: I1121 14:05:14.402846 4721 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5xgvc"
Nov 21 14:05:14 crc kubenswrapper[4721]: I1121 14:05:14.981840 4721 scope.go:117] "RemoveContainer" containerID="bf613cedd13a9689bd43ccd46e14f24a5ed5a9a6aff6fa123ea297718f0efb59"
Nov 21 14:05:14 crc kubenswrapper[4721]: E1121 14:05:14.982142 4721 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-mstbk_openshift-machine-config-operator(fe3943c5-1349-4275-bc45-25db418c7488)\"" pod="openshift-machine-config-operator/machine-config-daemon-mstbk" podUID="fe3943c5-1349-4275-bc45-25db418c7488"
Nov 21 14:05:15 crc kubenswrapper[4721]: I1121 14:05:15.458737 4721 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5xgvc" podUID="0c85fd98-0a74-451a-aafb-54ed0e13222d" containerName="registry-server" probeResult="failure" output=<
Nov 21 14:05:15 crc kubenswrapper[4721]: timeout: failed to connect service ":50051" within 1s
Nov 21 14:05:15 crc kubenswrapper[4721]: >
var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515110070642024441 0ustar coreroot
var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015110070643017357 5ustar coreroot
var/home/core/zuul-output/artifacts/0000755000175000017500000000000015110065004016475 5ustar corecore
var/home/core/zuul-output/docs/0000755000175000017500000000000015110065005015446 5ustar corecore